file_name (string, 5-52 chars) | name (string, 4-95 chars) | original_source_type (string, 0-23k chars) | source_type (string, 9-23k chars) | source_definition (string, 9-57.9k chars) | source (dict) | source_range (dict) | file_context (string, 0-721k chars) | dependencies (dict) | opens_and_abbrevs (list, 2-94 items) | vconfig (dict) | interleaved (bool, 1 class) | verbose_type (string, 1-7.42k chars) | effect (string, 118 classes) | effect_flags (sequence, 0-2 items) | mutual_with (sequence, 0-11 items) | ideal_premises (sequence, 0-236 items) | proof_features (sequence, 0-1 items) | is_simple_lemma (bool, 2 classes) | is_div (bool, 2 classes) | is_proof (bool, 2 classes) | is_simply_typed (bool, 2 classes) | is_type (bool, 2 classes) | partial_definition (string, 5-3.99k chars) | completed_definiton (string, 1-1.63M chars) | isa_cross_project_example (bool, 1 class) |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
LowStar.Monotonic.Buffer.fsti | LowStar.Monotonic.Buffer.loc_union_idem_2 | val loc_union_idem_2 (s1 s2: loc)
: Lemma (loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)] | val loc_union_idem_2 (s1 s2: loc)
: Lemma (loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)] | let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2 | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 26,
"end_line": 451,
"start_col": 0,
"start_line": 446
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
 * Most of the time, they will use wrappers such as buffer, immutable buffer, etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
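(* A minimal sketch of a concrete preorder with which the two indices can be
   instantiated: the trivial relation that allows the contents to evolve
   arbitrarily.  The name `trivial_srel` is illustrative (LowStar.Buffer
   provides the analogous `trivial_preorder`); a buffer with unconstrained
   contents would then have type `mbuffer a (trivial_srel a) (trivial_srel a)`. *)
let trivial_srel (a:Type0) : srel a = fun _ _ -> True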
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` have the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
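(* A sketch of the usual specification idiom built on ``as_seq``: a ghost
   predicate stating that, in heap `h`, buffer `b` currently holds the
   sequence `s`.  The name `holds_seq` is hypothetical, not part of this
   interface. *)
let holds_seq (#a:Type0) (#rrel #rel:srel a)
  (h:HS.mem) (b:mbuffer a rrel rel) (s:Seq.seq a) : GTot Type0
  = as_seq h b == s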
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder to the sub-buffer (``sub_rel``),
/// provided it is compatible.
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
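(* An illustrative ghost wrapper around ``mgsub``: it reuses the parent's
   preorder `rel` for the sub-buffer and simply threads through the bounds
   check that ``mgsub`` requires.  The name `example_gsub` is hypothetical,
   not part of this interface. *)
let example_gsub (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel) (i len:U32.t)
  : Ghost (mbuffer a rrel rel)
    (requires (U32.v i + U32.v len <= length b))
    (ensures (fun _ -> True))
  = mgsub rel b i len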
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer are the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers with the same region and address have
/// elements of the same type (and the same initial preorder).
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2 | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | s1: LowStar.Monotonic.Buffer.loc -> s2: LowStar.Monotonic.Buffer.loc
-> FStar.Pervasives.Lemma
(ensures
LowStar.Monotonic.Buffer.loc_union (LowStar.Monotonic.Buffer.loc_union s1 s2) s2 ==
LowStar.Monotonic.Buffer.loc_union s1 s2)
[SMTPat (LowStar.Monotonic.Buffer.loc_union (LowStar.Monotonic.Buffer.loc_union s1 s2) s2)] | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowStar.Monotonic.Buffer.loc",
"LowStar.Monotonic.Buffer.loc_union_assoc",
"Prims.unit",
"Prims.l_True",
"Prims.squash",
"Prims.eq2",
"LowStar.Monotonic.Buffer.loc_union",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat",
"Prims.Nil"
] | [] | true | false | true | false | false | let loc_union_idem_2 (s1 s2: loc)
: Lemma (loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)] =
| loc_union_assoc s1 s2 s2 | false |
LowStar.Monotonic.Buffer.fsti | LowStar.Monotonic.Buffer.all_disjoint | val all_disjoint (l: list loc) : Type0 | val all_disjoint (l: list loc) : Type0 | let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 36,
"end_line": 1005,
"start_col": 0,
"start_line": 1004
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
 * Most of the time, they will use wrappers such as buffer, immutable buffer, etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` have the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder to the sub-buffer (``sub_rel``),
/// provided it is compatible.
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer are the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers with the same region and address have
/// elements of the same type (and the same initial preorder).
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
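(* A small sketch of how the monoid laws compose: explicit calls to
   commutativity and associativity let the SMT solver regroup a union of
   three footprints.  The name `loc_union_shuffle` is hypothetical, not part
   of this interface. *)
let loc_union_shuffle (l1 l2 l3: loc)
  : Lemma (loc_union (loc_union l1 l2) l3 == loc_union l2 (loc_union l1 l3))
  = loc_union_comm l1 l2;
    loc_union_assoc l2 l1 l3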
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
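(* A sketch of how these constructors are typically combined by clients: the
   footprint of a buffer `b` together with everything reachable from a region
   `r` (for instance the stack frames nested under it).  The name
   `example_footprint` is hypothetical, not part of this interface. *)
let example_footprint (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel) (r:HS.rid) : GTot loc
  = loc_union (loc_buffer b) (loc_all_regions_from false r)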
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
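(* A small worked inclusion: a composite footprint covers each of its
   components, here the buffer component of the footprint sketched above.
   The name `example_footprint_covers_buffer` is hypothetical. *)
let example_footprint_covers_buffer (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel) (r:HS.rid)
  : Lemma (loc_includes (loc_union (loc_buffer b) (loc_all_regions_from false r))
                        (loc_buffer b))
  = loc_includes_refl (loc_buffer b);
    loc_includes_union_l (loc_buffer b) (loc_all_regions_from false r) (loc_buffer b)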
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then the set of memory
/// locations of ``b1`` includes that of ``b2``.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then so are their corresponding sets of memory
/// locations.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then the set of memory locations of ``s1`` includes that of ``s2``.
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
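(* Editorial note: hedged illustration, not part of the original interface.
   With `buf` packing each buffer into a `buf_t`, `all_live` on a concrete
   list normalizes to (roughly) `live h b1 /\ live h b2`; b1, b2 are
   hypothetical names. *)
let example_all_live (#a:Type0) (#rrel #rel:srel a)
  (h:HS.mem) (b1 b2:mbuffer a rrel rel)
  : Type0
  = all_live h [buf b1; buf b2]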
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"] | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | l: Prims.list LowStar.Monotonic.Buffer.loc -> Type0 | Prims.Tot | [
"total"
] | [] | [
"Prims.list",
"LowStar.Monotonic.Buffer.loc",
"FStar.BigOps.pairwise_and",
"LowStar.Monotonic.Buffer.loc_disjoint"
] | [] | false | false | false | true | true | let all_disjoint (l: list loc) : Type0 =
| BigOps.pairwise_and loc_disjoint l | false |
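(Editorial note: a hedged sketch of how the `all_disjoint` definition above is
used; it is not part of the dataset row. For a concrete list, the `unfold`
definition and `BigOps.pairwise_and` normalize to the pairwise `loc_disjoint`
conjuncts; the location names are hypothetical.)
let example_all_disjoint (l1 l2 l3: loc) : Type0 = all_disjoint [l1; l2; l3]
(* normalizes, roughly, to:
   loc_disjoint l1 l2 /\ loc_disjoint l1 l3 /\ loc_disjoint l2 l3 *)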
LowStar.Monotonic.Buffer.fsti | LowStar.Monotonic.Buffer.buf | val buf (#a: Type0) (#rrel #rel: srel a) (b: mbuffer a rrel rel) : buf_t | val buf (#a: Type0) (#rrel #rel: srel a) (b: mbuffer a rrel rel) : buf_t | let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|) | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 91,
"end_line": 991,
"start_col": 0,
"start_line": 991
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq h b``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` have the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
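(* Editorial note: a hedged sketch, not in the original file, showing `get`
   as a proof-level shorthand; `example_first_cell` is a hypothetical name. *)
let example_first_cell (#a:Type0) (#rrel #rel:srel a)
  (h:HS.mem) (b:mbuffer a rrel rel{length b > 0})
  : GTot a
  = get h b 0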
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Furthermore, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible.
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
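(* Editorial note: hedged sketch, not part of the original interface, of
   carving a prefix sub-buffer at the buffer's own preorder `rel`; `n` is a
   hypothetical bound. *)
let example_prefix (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel) (n:U32.t{U32.v n <= length b})
  : GTot (mbuffer a rrel rel)
  = mgsub rel b 0ul n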
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer are the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers with the same region and address have
/// elements of the same type (and the same preorders).
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
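(* Editorial note: hedged sketch, not in the original file: the address-based
   footprint of two buffers, as a client might use for freeable buffers; the
   names are hypothetical. Sub-buffers share their enclosing buffer's address,
   so such a footprint also covers them. *)
let example_two_addrs (#a:Type0) (#rrel #rel:srel a)
  (b1 b2:mbuffer a rrel rel)
  : GTot loc
  = loc_union (loc_addr_of_buffer b1) (loc_addr_of_buffer b2)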
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
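(* Editorial note: a hedged sketch, not in the original file, of a composite
   footprint a client might state in a modifies clause; all names are
   hypothetical. *)
let example_footprint (#a:Type0) (#rrel #rel:srel a) (#t:Type) (#p:Preorder.preorder t)
  (b:mbuffer a rrel rel) (r:HS.mreference t p) (rid:HS.rid)
  : GTot loc
  = loc_union (loc_buffer b)
              (loc_union (loc_mreference r) (loc_region_only true rid))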
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
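(* Editorial note: hedged sketch, not in the original file: a union includes
   each of its components, proved from the lemmas declared above. *)
let example_union_includes_components (l1 l2: loc)
  : Lemma (loc_includes (loc_union l1 l2) l1 /\ loc_includes (loc_union l1 l2) l2)
  = loc_includes_refl l1;
    loc_includes_refl l2;
    loc_includes_union_l l1 l2 l1;
    loc_includes_union_l l1 l2 l2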
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then the corresponding
/// sets of memory locations are related by inclusion as well.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then so are their corresponding sets of memory
/// locations.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then the corresponding sets of memory locations are related by inclusion as well.
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | b: LowStar.Monotonic.Buffer.mbuffer a rrel rel -> LowStar.Monotonic.Buffer.buf_t | Prims.Tot | [
"total"
] | [] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"FStar.Pervasives.Mkdtuple4",
"LowStar.Monotonic.Buffer.buf_t"
] | [] | false | false | false | false | false | let buf (#a: Type0) (#rrel #rel: srel a) (b: mbuffer a rrel rel) : buf_t =
| (| a, rrel, rel, b |) | false |
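(Editorial note: a hedged usage sketch for the `buf` constructor above, not part
of the dataset row. It packages buffers of possibly different element types into
one `list buf_t`; the names are hypothetical.)
let example_buf_list (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
  (b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
  : list buf_t
  = [buf b1; buf b2]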
LowStar.Monotonic.Buffer.fsti | LowStar.Monotonic.Buffer.loc_addr_of_buffer | val loc_addr_of_buffer (#a: Type0) (#rrel #rel: srel a) (b: mbuffer a rrel rel) : GTot loc | val loc_addr_of_buffer (#a: Type0) (#rrel #rel: srel a) (b: mbuffer a rrel rel) : GTot loc | let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b)) | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 61,
"end_line": 536,
"start_col": 7,
"start_line": 535
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
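(* Editorial note: hedged sketch, not in the original file: `len`/`length` are
   ghost, so they may only appear in proof-level code such as this predicate;
   the name is hypothetical. *)
let example_is_empty (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
  : GTot bool
  = len b = 0ul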
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq h b``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` have the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Furthermore, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible.
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
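(* Worked instance (a sketch of the lemma above): if `length b >= 13`, then
     mgsub rel (mgsub rel b 3ul 10ul) 2ul 3ul == mgsub rel b 5ul 3ul
   i.e. taking elements [2, 5) of the sub-buffer starting at offset 3 is the
   same as taking elements [5, 8) of the parent buffer. *)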
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer are the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers with the same region and address have
/// elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
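(* For example (a sketch; `l1`, `l2` are arbitrary locations): the laws above
   let the SMT solver normalize
     loc_union l1 (loc_union loc_none (loc_union l1 l2))
   to `loc_union l1 l2`, by `loc_union_loc_none_l` and `loc_union_idem_1`. *)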
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
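(* Concretely (a sketch): whenever `2 + 4 <= length b`, the lemma above gives
     loc_buffer (mgsub sub_rel b 2ul 4ul) == loc_buffer_from_to b 2ul 6ul
   so the footprint of a sub-buffer is just a sub-range of the parent's. *)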
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | b: LowStar.Monotonic.Buffer.mbuffer a rrel rel -> Prims.GTot LowStar.Monotonic.Buffer.loc | Prims.GTot | [
"sometrivial"
] | [] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Monotonic.Buffer.loc_addresses",
"LowStar.Monotonic.Buffer.frameOf",
"FStar.Set.singleton",
"Prims.nat",
"LowStar.Monotonic.Buffer.as_addr",
"LowStar.Monotonic.Buffer.loc"
] | [] | false | false | false | false | false | let loc_addr_of_buffer (#a: Type0) (#rrel #rel: srel a) (b: mbuffer a rrel rel) : GTot loc =
| loc_addresses false (frameOf b) (Set.singleton (as_addr b)) | false |
LowStar.Monotonic.Buffer.fsti | LowStar.Monotonic.Buffer.loc_mreference | val loc_mreference (#a: Type) (#p: Preorder.preorder a) (b: HS.mreference a p) : GTot loc | val loc_mreference (#a: Type) (#p: Preorder.preorder a) (b: HS.mreference a p) : GTot loc | let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b)) | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 66,
"end_line": 557,
"start_col": 0,
"start_line": 552
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
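(* Typical use (our sketch; `read_first` is not part of this interface):
   liveness shows up as a precondition of stateful operations, e.g.
     val read_first (#a:Type0) (#rrel #rel:srel a)
       (b:mbuffer a rrel rel{length b > 0})
       : HST.Stack a (requires fun h -> live h b)
                     (ensures fun h0 _ h1 -> h0 == h1)
*)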
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
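(* Note (illustrative): both `len` and `length` are ghost, so a refinement such
   as `b:mbuffer a rrel rel{length b == 32}` may appear in specifications, but
   the length of a buffer cannot be queried at run time. *)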
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq h b``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` have the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
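(* Example (our sketch): specifications can talk about individual cells via
   `get`, e.g. a predicate stating that cell 0 of `b` is zero in heap `h`:
     let cell0_is_zero (#rrel #rel:srel U32.t) (h:HS.mem)
       (b:mbuffer U32.t rrel rel{length b > 0}) : GTot Type0
       = get h b 0 == 0ul
*)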
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining the sub-buffer-related API, we need to define the notion of "compatibility".
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers.
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Furthermore, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible.
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer are the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
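(* Worked instance (a sketch of the lemma above): if `2 + 4 <= length b`, then
     as_seq h (mgsub sub_rel b 2ul 4ul) == Seq.slice (as_seq h b) 2 6
   so facts about the parent's contents transfer directly to the sub-buffer. *)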
/// Two live non-null buffers with the same region and address have
/// elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``. | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | b: FStar.Monotonic.HyperStack.mreference a p -> Prims.GTot LowStar.Monotonic.Buffer.loc | Prims.GTot | [
"sometrivial"
] | [] | [
"FStar.Preorder.preorder",
"FStar.Monotonic.HyperStack.mreference",
"LowStar.Monotonic.Buffer.loc_addresses",
"FStar.Monotonic.HyperStack.frameOf",
"FStar.Set.singleton",
"Prims.nat",
"FStar.Monotonic.HyperStack.as_addr",
"LowStar.Monotonic.Buffer.loc"
] | [] | false | false | false | false | false | let loc_mreference (#a: Type) (#p: Preorder.preorder a) (b: HS.mreference a p) : GTot loc =
| loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b)) | false |
LowStar.Monotonic.Buffer.fsti | LowStar.Monotonic.Buffer.all_live | val all_live (h: HS.mem) (l: list buf_t) : Type0 | val all_live (h: HS.mem) (l: list buf_t) : Type0 | let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 60,
"end_line": 998,
"start_col": 0,
"start_line": 997
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq h b``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` have the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining the sub-buffer-related API, we need to define the notion of "compatibility".
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers.
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Furthermore, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible.
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer are the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers with the same region and address have
/// elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
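(* For example (a sketch; `l` is an arbitrary location): to show that a compound
   footprint covers `loc_buffer b`, it suffices to cover it on one side of the
   union:
     loc_includes (loc_union (loc_buffer b) l) (loc_buffer b)
   follows from `loc_includes_refl` and `loc_includes_union_l`. *)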
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then the corresponding sets of
/// memory locations are related by ``loc_includes`` as well.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
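(* Illustrative sketch (hypothetical client lemma): splitting a buffer of
   length 8 into two halves; the footprint of each half is included in the
   footprint of the whole buffer, by ``loc_includes_gsub_buffer_r'`` above. *)
let example_loc_includes_halves (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel{length b == 8})
  :Lemma (loc_includes (loc_buffer b) (loc_buffer (mgsub rel b 0ul 4ul)) /\
          loc_includes (loc_buffer b) (loc_buffer (mgsub rel b 4ul 4ul)))
  = loc_includes_gsub_buffer_r' b 0ul 4ul rel;
    loc_includes_gsub_buffer_r' b 4ul 4ul rel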
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
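(* Illustrative sketch (hypothetical): the footprint of a buffer is also
   included in ``loc_addr_of_buffer b``, the footprint of its address with
   ``preserve_liveness = false``, by instantiating the lemma above. *)
let example_loc_addr_includes_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
  :Lemma (loc_includes (loc_addr_of_buffer b) (loc_buffer b))
  = loc_includes_addresses_buffer false (frameOf b) (Set.singleton (as_addr b)) b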
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
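(* Illustrative sketch (hypothetical): the same holds for the region-only
   footprint of the buffer's frame, for either value of the liveness flag,
   since ``loc_includes_region_buffer`` is general in that flag. *)
let example_loc_region_includes_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
  :Lemma (loc_includes (loc_region_only false (frameOf b)) (loc_buffer b))
  = loc_includes_region_buffer false (Set.singleton (frameOf b)) b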
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then the set of memory locations corresponding
/// to ``s1`` includes the one corresponding to ``s2``.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then the set of memory locations corresponding to ``s1`` includes
/// the one corresponding to ``s2``
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
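(* Illustrative sketch (hypothetical): disjointness is inherited by
   sub-buffers, by combining ``loc_disjoint_includes`` with the sub-buffer
   inclusion lemma ``loc_includes_gsub_buffer_r'``. *)
let example_loc_disjoint_sub (#a:Type0) (#rrel #rel:srel a)
  (b1 b2:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
  :Lemma (requires (U32.v i + U32.v len <= length b1 /\
                    loc_disjoint (loc_buffer b1) (loc_buffer b2)))
         (ensures (loc_disjoint (loc_buffer (mgsub rel b1 i len)) (loc_buffer b2)))
  = loc_includes_gsub_buffer_r' b1 i len rel;
    loc_includes_refl (loc_buffer b2);
    loc_disjoint_includes (loc_buffer b1) (loc_buffer b2)
                          (loc_buffer (mgsub rel b1 i len)) (loc_buffer b2)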
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
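(* Illustrative sketch (hypothetical): the two halves of an 8-element buffer
   occupy disjoint memory locations, by ``loc_disjoint_gsub_buffer``. *)
let example_loc_disjoint_halves (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel{length b == 8})
  :Lemma (loc_disjoint (loc_buffer (mgsub rel b 0ul 4ul)) (loc_buffer (mgsub rel b 4ul 4ul)))
  = loc_disjoint_gsub_buffer b 0ul 4ul rel 4ul 4ul rel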
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"] | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | h: FStar.Monotonic.HyperStack.mem -> l: Prims.list LowStar.Monotonic.Buffer.buf_t -> Type0 | Prims.Tot | [
"total"
] | [] | [
"FStar.Monotonic.HyperStack.mem",
"Prims.list",
"LowStar.Monotonic.Buffer.buf_t",
"FStar.BigOps.big_and",
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Monotonic.Buffer.live"
] | [] | false | false | false | true | true | let all_live (h: HS.mem) (l: list buf_t) : Type0 =
| BigOps.big_and #buf_t (fun (| _ , _ , _ , b |) -> live h b) l | false |
Hacl.Spec.PrecompBaseTable256.fst | Hacl.Spec.PrecompBaseTable256.a_pow2_192_lemma | val a_pow2_192_lemma: #t:Type -> k:SE.concrete_ops t -> a:t ->
Lemma (k.SE.to.SE.refl (a_pow2_192 k a) ==
LE.pow k.SE.to.SE.comm_monoid (k.SE.to.SE.refl a) (pow2 192)) | val a_pow2_192_lemma: #t:Type -> k:SE.concrete_ops t -> a:t ->
Lemma (k.SE.to.SE.refl (a_pow2_192 k a) ==
LE.pow k.SE.to.SE.comm_monoid (k.SE.to.SE.refl a) (pow2 192)) | let a_pow2_192_lemma #t k a =
let cm = k.SE.to.SE.comm_monoid in
let refl = k.SE.to.SE.refl in
calc (==) {
refl (a_pow2_192 k a);
(==) { }
refl (SE.exp_pow2 k (a_pow2_128 k a) 64);
(==) { a_pow2_64_lemma k (a_pow2_128 k a) }
LE.pow cm (refl (a_pow2_128 k a)) (pow2 64);
(==) { a_pow2_128_lemma k a }
LE.pow cm (LE.pow cm (refl a) (pow2 128)) (pow2 64);
(==) { LE.lemma_pow_mul cm (refl a) (pow2 128) (pow2 64) }
LE.pow cm (refl a) (pow2 128 * pow2 64);
(==) { Math.Lemmas.pow2_plus 128 64 }
LE.pow cm (refl a) (pow2 192);
} | {
"file_name": "code/bignum/Hacl.Spec.PrecompBaseTable256.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 3,
"end_line": 143,
"start_col": 0,
"start_line": 128
} | module Hacl.Spec.PrecompBaseTable256
open FStar.Mul
open Lib.IntTypes
module LSeq = Lib.Sequence
module Loops = Lib.LoopCombinators
module LE = Lib.Exponentiation
module SE = Spec.Exponentiation
module BD = Hacl.Spec.Bignum.Definitions
#set-options "--z3rlimit 50 --fuel 0 --ifuel 0"
let lemma_mod_pow2_sub x a b =
calc (==) {
x / pow2 a % pow2 b * pow2 a;
(==) { Math.Lemmas.pow2_modulo_division_lemma_1 x a (a + b) }
x % pow2 (a + b) / pow2 a * pow2 a;
(==) { Math.Lemmas.euclidean_division_definition (x % pow2 (a + b)) (pow2 a) }
x % pow2 (a + b) - x % pow2 (a + b) % pow2 a;
(==) { Math.Lemmas.pow2_modulo_modulo_lemma_1 x a (a + b) }
x % pow2 (a + b) - x % pow2 a;
}
let lemma_decompose_nat256_as_four_u64 x =
let x0 = x % pow2 64 in
let x1 = x / pow2 64 % pow2 64 in
let x2 = x / pow2 128 % pow2 64 in
let x3' = x / pow2 192 % pow2 64 in
Math.Lemmas.lemma_div_lt x 256 192;
Math.Lemmas.small_mod (x / pow2 192) (pow2 64);
let x3 = x / pow2 192 in
assert (x3 == x3');
calc (==) {
x0 + x1 * pow2 64 + x2 * pow2 128 + x3 * pow2 192;
(==) { }
x0 + x1 * pow2 64 + (x / pow2 128 % pow2 64) * pow2 128 + x / pow2 192 * pow2 192;
(==) { lemma_mod_pow2_sub x 128 64 }
x0 + x1 * pow2 64 + x % pow2 192 - x % pow2 128 + x / pow2 192 * pow2 192;
(==) { Math.Lemmas.euclidean_division_definition x (pow2 192) }
x0 + x1 * pow2 64 - x % pow2 128 + x;
(==) { lemma_mod_pow2_sub x 64 64 }
x;
}
let lemma_point_mul_base_precomp4 #t k a b =
let (b0, b1, b2, b3) = decompose_nat256_as_four_u64 b in
let a_pow2_64 = LE.pow k a (pow2 64) in
let a_pow2_128 = LE.pow k a (pow2 128) in
let a_pow2_192 = LE.pow k a (pow2 192) in
let res = LE.exp_four_fw k a 64 b0 a_pow2_64 b1 a_pow2_128 b2 a_pow2_192 b3 4 in
calc (==) {
LE.exp_four_fw k a 64 b0 a_pow2_64 b1 a_pow2_128 b2 a_pow2_192 b3 4;
(==) { LE.exp_four_fw_lemma k a 64 b0 a_pow2_64 b1 a_pow2_128 b2 a_pow2_192 b3 4 }
k.LE.mul
(k.LE.mul
(k.LE.mul (LE.pow k a b0) (LE.pow k (LE.pow k a (pow2 64)) b1))
(LE.pow k a_pow2_128 b2))
(LE.pow k a_pow2_192 b3);
(==) { LE.lemma_pow_mul k a (pow2 64) b1 }
k.LE.mul
(k.LE.mul
(k.LE.mul (LE.pow k a b0) (LE.pow k a (b1 * pow2 64)))
(LE.pow k a_pow2_128 b2))
(LE.pow k a_pow2_192 b3);
(==) { LE.lemma_pow_add k a b0 (b1 * pow2 64) }
k.LE.mul
(k.LE.mul
(LE.pow k a (b0 + b1 * pow2 64))
(LE.pow k (LE.pow k a (pow2 128)) b2))
(LE.pow k a_pow2_192 b3);
(==) { LE.lemma_pow_mul k a (pow2 128) b2 }
k.LE.mul
(k.LE.mul (LE.pow k a (b0 + b1 * pow2 64)) (LE.pow k a (b2 * pow2 128)))
(LE.pow k a_pow2_192 b3);
(==) { LE.lemma_pow_add k a (b0 + b1 * pow2 64) (b2 * pow2 128) }
k.LE.mul
(LE.pow k a (b0 + b1 * pow2 64 + b2 * pow2 128))
(LE.pow k (LE.pow k a (pow2 192)) b3);
(==) { LE.lemma_pow_mul k a (pow2 192) b3 }
k.LE.mul
(LE.pow k a (b0 + b1 * pow2 64 + b2 * pow2 128))
(LE.pow k a (b3 * pow2 192));
(==) { LE.lemma_pow_add k a (b0 + b1 * pow2 64 + b2 * pow2 128) (b3 * pow2 192) }
LE.pow k a (b0 + b1 * pow2 64 + b2 * pow2 128 + b3 * pow2 192);
(==) { lemma_decompose_nat256_as_four_u64 b }
LE.pow k a b;
}
//-----------------------
#push-options "--fuel 2"
let rec exp_pow2_rec_is_exp_pow2 #t k a b =
if b = 0 then Lib.LoopCombinators.eq_repeat0 k.sqr a
else begin
Lib.LoopCombinators.unfold_repeat b k.sqr a (b - 1);
assert (Loops.repeat b k.sqr a == k.sqr (Loops.repeat (b - 1) k.sqr a));
exp_pow2_rec_is_exp_pow2 k a (b - 1) end
#pop-options
let a_pow2_64_lemma #t k a =
SE.exp_pow2_lemma k a 64;
LE.exp_pow2_lemma k.SE.to.SE.comm_monoid (k.SE.to.SE.refl a) 64
let a_pow2_128_lemma #t k a =
let cm = k.SE.to.SE.comm_monoid in
let refl = k.SE.to.SE.refl in
calc (==) {
refl (a_pow2_128 k a);
(==) { }
refl (SE.exp_pow2 k (a_pow2_64 k a) 64);
(==) { a_pow2_64_lemma k (a_pow2_64 k a) }
LE.pow cm (refl (a_pow2_64 k a)) (pow2 64);
(==) { a_pow2_64_lemma k a }
LE.pow cm (LE.pow cm (refl a) (pow2 64)) (pow2 64);
(==) { LE.lemma_pow_mul cm (refl a) (pow2 64) (pow2 64) }
LE.pow cm (refl a) (pow2 64 * pow2 64);
(==) { Math.Lemmas.pow2_plus 64 64 }
LE.pow cm (refl a) (pow2 128);
} | {
"checked_file": "/",
"dependencies": [
"Spec.Exponentiation.fsti.checked",
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.LoopCombinators.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.Exponentiation.fsti.checked",
"Hacl.Spec.Bignum.Definitions.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Calc.fsti.checked"
],
"interface_file": true,
"source_file": "Hacl.Spec.PrecompBaseTable256.fst"
} | [
{
"abbrev": true,
"full_module": "Lib.LoopCombinators",
"short_module": "Loops"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Bignum.Definitions",
"short_module": "BD"
},
{
"abbrev": true,
"full_module": "Spec.Exponentiation",
"short_module": "SE"
},
{
"abbrev": true,
"full_module": "Lib.Exponentiation",
"short_module": "LE"
},
{
"abbrev": true,
"full_module": "Lib.Sequence",
"short_module": "LSeq"
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | k: Spec.Exponentiation.concrete_ops t -> a: t
-> FStar.Pervasives.Lemma
(ensures
Mkto_comm_monoid?.refl (Mkconcrete_ops?.to k) (Hacl.Spec.PrecompBaseTable256.a_pow2_192 k a) ==
Lib.Exponentiation.Definition.pow (Mkto_comm_monoid?.comm_monoid (Mkconcrete_ops?.to k))
(Mkto_comm_monoid?.refl (Mkconcrete_ops?.to k) a)
(Prims.pow2 192)) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"Spec.Exponentiation.concrete_ops",
"FStar.Calc.calc_finish",
"Spec.Exponentiation.__proj__Mkto_comm_monoid__item__a_spec",
"Spec.Exponentiation.__proj__Mkconcrete_ops__item__to",
"Prims.eq2",
"Hacl.Spec.PrecompBaseTable256.a_pow2_192",
"Lib.Exponentiation.Definition.pow",
"Prims.pow2",
"Prims.Cons",
"FStar.Preorder.relation",
"Prims.Nil",
"Prims.unit",
"FStar.Calc.calc_step",
"FStar.Mul.op_Star",
"Hacl.Spec.PrecompBaseTable256.a_pow2_128",
"Spec.Exponentiation.exp_pow2",
"FStar.Calc.calc_init",
"FStar.Calc.calc_pack",
"Prims.squash",
"Hacl.Spec.PrecompBaseTable256.a_pow2_64_lemma",
"Hacl.Spec.PrecompBaseTable256.a_pow2_128_lemma",
"Lib.Exponentiation.Definition.lemma_pow_mul",
"FStar.Math.Lemmas.pow2_plus",
"Spec.Exponentiation.__proj__Mkto_comm_monoid__item__refl",
"Lib.Exponentiation.Definition.comm_monoid",
"Spec.Exponentiation.__proj__Mkto_comm_monoid__item__comm_monoid"
] | [] | false | false | true | false | false | let a_pow2_192_lemma #t k a =
| let cm = k.SE.to.SE.comm_monoid in
let refl = k.SE.to.SE.refl in
calc ( == ) {
refl (a_pow2_192 k a);
( == ) { () }
refl (SE.exp_pow2 k (a_pow2_128 k a) 64);
( == ) { a_pow2_64_lemma k (a_pow2_128 k a) }
LE.pow cm (refl (a_pow2_128 k a)) (pow2 64);
( == ) { a_pow2_128_lemma k a }
LE.pow cm (LE.pow cm (refl a) (pow2 128)) (pow2 64);
( == ) { LE.lemma_pow_mul cm (refl a) (pow2 128) (pow2 64) }
LE.pow cm (refl a) (pow2 128 * pow2 64);
( == ) { Math.Lemmas.pow2_plus 128 64 }
LE.pow cm (refl a) (pow2 192);
} | false |
LowStar.Monotonic.Buffer.fsti | LowStar.Monotonic.Buffer.loc_includes_union_r' | val loc_includes_union_r' (s s1 s2: loc)
: Lemma (loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))] | val loc_includes_union_r' (s s1 s2: loc)
: Lemma (loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))] | let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2 | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 69,
"end_line": 654,
"start_col": 0,
"start_line": 645
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
     (rel s (Seq.replace_subseq s i j s2)))  //if we replace the slice [i, j) in s by s2, then s and the resulting sequence are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` return the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
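(* Illustrative sketch, not part of the original interface (hypothetical
   names): the trivial preorder relates any two sequences, cf.
   ``LowStar.Buffer.trivial_preorder``, so it is compatible with any
   in-bounds sub-range of a buffer that itself uses it; ``mgsub`` may then
   reuse it for the sub-buffer. *)
let example_trivial_preorder (a:Type0) :srel a = fun _ _ -> True

let example_trivial_compatible_sub (#a:Type0)
  (b:mbuffer a (example_trivial_preorder a) (example_trivial_preorder a))
  (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b})
  :Lemma (compatible_sub b i len (example_trivial_preorder a))
  = ()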
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers that have the same region and address have
/// elements of the same type (and the same initial preorder).
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
s: LowStar.Monotonic.Buffer.loc ->
s1: LowStar.Monotonic.Buffer.loc ->
s2: LowStar.Monotonic.Buffer.loc
-> FStar.Pervasives.Lemma
(ensures
LowStar.Monotonic.Buffer.loc_includes s (LowStar.Monotonic.Buffer.loc_union s1 s2) <==>
LowStar.Monotonic.Buffer.loc_includes s s1 /\ LowStar.Monotonic.Buffer.loc_includes s s2)
[SMTPat (LowStar.Monotonic.Buffer.loc_includes s (LowStar.Monotonic.Buffer.loc_union s1 s2))] | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowStar.Monotonic.Buffer.loc",
"FStar.Classical.move_requires",
"Prims.l_and",
"LowStar.Monotonic.Buffer.loc_includes",
"LowStar.Monotonic.Buffer.loc_union",
"LowStar.Monotonic.Buffer.loc_includes_trans",
"Prims.unit",
"Prims.l_or",
"LowStar.Monotonic.Buffer.loc_includes_union_l",
"LowStar.Monotonic.Buffer.loc_includes_union_r",
"Prims.l_True",
"Prims.squash",
"Prims.l_iff",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat",
"Prims.Nil"
] | [] | false | false | true | false | false | let loc_includes_union_r' (s s1 s2: loc)
: Lemma (loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))] =
| Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2 | false |
LowStar.Monotonic.Buffer.fsti | LowStar.Monotonic.Buffer.buf_t | val buf_t : Type | let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 67,
"end_line": 987,
"start_col": 0,
"start_line": 987
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
     (rel s (Seq.replace_subseq s i j s2)))  //if we replace the slice [i, j) in s by s2, then s and the resulting sequence are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` have the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
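(* Illustrative sketch, hypothetical: ``live`` and ``get`` are the building
   blocks of stateful specifications on buffers. A read operation would
   typically be specified along these lines (``example_read`` is not part of
   this interface; compare with the stateful ``index`` operation of Low*
   buffers):
     val example_read (#a:Type0) (#rrel #rel:srel a)
       (b:mbuffer a rrel rel) (i:U32.t{U32.v i < length b})
       : HST.Stack a
         (requires (fun h -> live h b))
         (ensures (fun h0 x h1 -> h0 == h1 /\ x == get h0 b (U32.v i)))
*)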
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Furthermore, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible with the parent buffer's preorder
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
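(* Illustrative sketch, hypothetical: carving the slice [2, 7) out of a
   sufficiently long buffer while keeping its current preorder ``rel``:
     let example_sub (#a:Type0) (#rrel #rel:srel a)
       (b:mbuffer a rrel rel{length b >= 7})
       : GTot (mbuffer a rrel rel)
       = mgsub rel b 2ul 5ul
   The refinement 2 + 5 <= length b discharges the precondition of ``mgsub``;
   the lemmas below relate liveness, length and contents of the sub-buffer to
   those of ``b``. *)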
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer are the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers having the same region and address have
/// their elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
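(* Illustrative sketch, hypothetical: thanks to the algebraic laws above and
   their SMT patterns, goals that merely reshuffle unions of locations are
   usually discharged automatically, along the lines of:
     let example_flatten (l1 l2: loc)
       : Lemma (loc_union (loc_union l1 loc_none) l2 == loc_union l1 l2)
       = ()
*)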
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
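(* Illustrative sketch, hypothetical: footprints are built by taking unions of
   the constructors above. For instance, a function mutating a buffer ``b``
   and a reference ``r`` would typically advertise
     loc_union (loc_buffer b) (loc_mreference r)
   as the location set in its ``modifies`` clause. *)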
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
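(* Illustrative sketch, hypothetical: inclusion is what lets a caller weaken a
   callee's footprint. For example, a buffer is included in any union that
   mentions it, a fact that typically follows from the lemmas above:
     let example_includes (#a:Type0) (#rrel #rel:srel a)
       (b:mbuffer a rrel rel) (l:loc)
       : Lemma (loc_includes (loc_union (loc_buffer b) l) (loc_buffer b))
       = ()
*)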
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then the set of memory
/// locations of ``b1`` includes that of ``b2``.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in that region.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then so do their corresponding sets of memory
/// locations.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then so do their corresponding memory locations
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
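(* Illustrative sketch, hypothetical: two non-overlapping sub-buffers of the
   same buffer have disjoint footprints, so writing to one preserves the
   other. For a buffer of length at least 8:
     let example_disjoint (#a:Type0) (#rrel #rel:srel a)
       (b:mbuffer a rrel rel{length b >= 8})
       : Lemma (loc_disjoint (loc_buffer (mgsub rel b 0ul 4ul))
                             (loc_buffer (mgsub rel b 4ul 4ul)))
       = ()
*)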
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | Type | Prims.Tot | [
"total"
] | [] | [
"FStar.Pervasives.dtuple4",
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer"
] | [] | false | false | false | true | true | let buf_t =
| a: Type0 & rrel: srel a & rel: srel a & mbuffer a rrel rel | false |
|
LowStar.Monotonic.Buffer.fsti | LowStar.Monotonic.Buffer.loc_freed_mreference | val loc_freed_mreference (#a: Type) (#p: Preorder.preorder a) (b: HS.mreference a p) : GTot loc | val loc_freed_mreference (#a: Type) (#p: Preorder.preorder a) (b: HS.mreference a p) : GTot loc | let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b)) | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 67,
"end_line": 565,
"start_col": 0,
"start_line": 560
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
 * Most of the time, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` have the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Furthermore, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible with the parent buffer's preorder
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer are the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers having the same region and address have
/// their elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b)) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | b: FStar.Monotonic.HyperStack.mreference a p -> Prims.GTot LowStar.Monotonic.Buffer.loc | Prims.GTot | [
"sometrivial"
] | [] | [
"FStar.Preorder.preorder",
"FStar.Monotonic.HyperStack.mreference",
"LowStar.Monotonic.Buffer.loc_addresses",
"FStar.Monotonic.HyperStack.frameOf",
"FStar.Set.singleton",
"Prims.nat",
"FStar.Monotonic.HyperStack.as_addr",
"LowStar.Monotonic.Buffer.loc"
] | [] | false | false | false | false | false | let loc_freed_mreference (#a: Type) (#p: Preorder.preorder a) (b: HS.mreference a p) : GTot loc =
| loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b)) | false |
LowStar.Monotonic.Buffer.fsti | LowStar.Monotonic.Buffer.loc_union_l | val loc_union_l : l: Prims.list LowStar.Monotonic.Buffer.loc -> Prims.GTot LowStar.Monotonic.Buffer.loc | let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none) | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 63,
"end_line": 1012,
"start_col": 0,
"start_line": 1011
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
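(* Hedged sketch, not from the original file: `live` is the precondition of
   every stateful read or write, e.g. a function reading the first cell could
   be specified roughly as

     val read_first (#rrel #rel:srel int) (b:mbuffer int rrel rel)
       : HST.Stack int
         (requires (fun h -> live h b /\ length b > 0))
         (ensures  (fun h0 r h1 -> h0 == h1 /\ r == Seq.index (as_seq h0 b) 0))

   The name `read_first` is hypothetical. *)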
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` have the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
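(* Hedged sketch, not from the original file: since `get h b i` is just
   `Seq.index (as_seq h b) i`, a specification can constrain individual cells,
   e.g. (the name `first_cell_is_one` is hypothetical)

     let first_cell_is_one (#rrel #rel:srel int) (h:HS.mem)
         (b:mbuffer int rrel rel{length b > 0}) : GTot Type0
       = get h b 0 == 1
*)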
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
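(* Hedged example, not part of the original interface: take the trivial
   preorder, e.g.

     let trivial_rel (a:Type0) : srel a = fun _ _ -> True

   A buffer `b : mbuffer a (trivial_rel a) (trivial_rel a)` then satisfies
   `compatible_sub b i len (trivial_rel a)` for any in-bounds `i` and `len`,
   because both implications in `compatible_subseq_preorder` conclude in
   `True`. The name `trivial_rel` is illustrative only. *)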
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` return the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further the clients can attach a preorder with the subbuffer (sub_rel),
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
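(* Worked instance (hedged, not from the original file): with `i1 = 2ul`,
   `len1 = 8ul`, `i2 = 1ul`, `len2 = 4ul` and `length b >= 10`, the lemma gives

     mgsub rel (mgsub rel b 2ul 8ul) 1ul 4ul == mgsub rel b 3ul 4ul

   i.e. nested sub-buffers collapse by adding their offsets. *)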
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer are the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers having the same region and address have
/// their elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
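(* Hedged remark: together with `loc_union_idem`, `loc_union_comm` and
   `loc_union_assoc` above, these two lemmas make `(loc, loc_union, loc_none)`
   a commutative idempotent monoid, so for instance
   `loc_union loc_none (loc_union l l) == l` for any `l : loc`. *)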
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
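(* Hedged sketch, not from the original file: after `HST.push_frame`, the
   footprint of everything allocated in the new frame (and any frames nested
   under it) is typically described as

     loc_all_regions_from false (HS.get_tip h1)

   assuming `HS.get_tip` returns the current top-most stack region of `h1`;
   `loc_region_only`, by contrast, deliberately excludes extending regions. *)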
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then so are their
/// corresponding sets of memory locations.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then so are their corresponding sets of memory
/// locations.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then so are their corresponding memory locations
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
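(* Hedged instance: splitting a buffer `b` with `length b >= 8` into two
   non-overlapping halves gives disjoint footprints, e.g.

     loc_disjoint (loc_buffer (mgsub rel b 0ul 4ul))
                  (loc_buffer (mgsub rel b 4ul 4ul))

   which follows from the lemma above via the `i1 + len1 <= i2` disjunct. *)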
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
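(* Hedged sketch: for two buffers `b1` and `b2` (illustrative names),
   `all_live h [buf b1; buf b2]` amounts, up to normalization of
   `BigOps.big_and`, to the plain conjunction `live h b1 /\ live h b2`,
   so callers never manipulate `buf_t` directly. *)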
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
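(* Hedged sketch: `all_disjoint [loc_buffer b1; loc_buffer b2; loc_buffer b3]`
   (names illustrative) states pairwise disjointness, i.e. essentially

     loc_disjoint (loc_buffer b1) (loc_buffer b2) /\
     loc_disjoint (loc_buffer b1) (loc_buffer b3) /\
     loc_disjoint (loc_buffer b2) (loc_buffer b3)
*)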
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"] | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | l: Prims.list LowStar.Monotonic.Buffer.loc -> Prims.GTot LowStar.Monotonic.Buffer.loc | Prims.GTot | [
"sometrivial"
] | [] | [
"Prims.list",
"LowStar.Monotonic.Buffer.loc",
"FStar.BigOps.normal",
"FStar.List.Tot.Base.fold_right_gtot",
"LowStar.Monotonic.Buffer.loc_union",
"LowStar.Monotonic.Buffer.loc_none"
] | [] | false | false | false | false | false | let loc_union_l (l: list loc) =
| BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none) | false |
|
LowStar.Monotonic.Buffer.fsti | LowStar.Monotonic.Buffer.get | val get (#a: Type0) (#rrel #rel: srel a) (h: HS.mem) (p: mbuffer a rrel rel) (i: nat)
: Ghost a (requires (i < length p)) (ensures (fun _ -> True)) | val get (#a: Type0) (#rrel #rel: srel a) (h: HS.mem) (p: mbuffer a rrel rel) (i: nat)
: Ghost a (requires (i < length p)) (ensures (fun _ -> True)) | let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 28,
"end_line": 248,
"start_col": 0,
"start_line": 246
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` have the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes. | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | h: FStar.Monotonic.HyperStack.mem -> p: LowStar.Monotonic.Buffer.mbuffer a rrel rel -> i: Prims.nat
-> Prims.Ghost a | Prims.Ghost | [] | [] | [
"LowStar.Monotonic.Buffer.srel",
"FStar.Monotonic.HyperStack.mem",
"LowStar.Monotonic.Buffer.mbuffer",
"Prims.nat",
"FStar.Seq.Base.index",
"LowStar.Monotonic.Buffer.as_seq",
"Prims.b2t",
"Prims.op_LessThan",
"LowStar.Monotonic.Buffer.length",
"Prims.l_True"
] | [] | false | false | false | false | false | let get (#a: Type0) (#rrel #rel: srel a) (h: HS.mem) (p: mbuffer a rrel rel) (i: nat)
: Ghost a (requires (i < length p)) (ensures (fun _ -> True)) =
| Seq.index (as_seq h p) i | false |
LowStar.Monotonic.Buffer.fsti | LowStar.Monotonic.Buffer.loc_pairwise_disjoint | val loc_pairwise_disjoint (l: list loc) : Type0 | val loc_pairwise_disjoint (l: list loc) : Type0 | let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 82,
"end_line": 1019,
"start_col": 0,
"start_line": 1019
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` have the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
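(* Illustrative note (added sketch, not part of the original interface):
   [get] merely abbreviates sequence indexing, so for a hypothetical live
   buffer [b] with [0 < length b] the two spellings below are interchangeable
   in proofs:
     [get h b 0 == Seq.index (as_seq h b) 0]
*)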
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible with the preorder of the parent buffer.
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers with the same region and address have
/// elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
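(* Illustrative sketch (added example; [l1] and [l2] are hypothetical locations):
   the identity, commutativity, associativity and idempotence laws above let
   unions of locations be normalized, e.g.
     [loc_union l1 (loc_union l2 l1) == loc_union l1 l2]
   follows by commuting [l2] and [l1] and then applying [loc_union_idem_1]. *)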
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
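(* Illustrative sketch (added example; [b] is a hypothetical buffer and [l] a
   hypothetical location): inclusion chains compose, e.g. from
     [loc_includes (loc_buffer b) l]
   and the fact that the region of [b] includes [loc_buffer b]
   (see [loc_includes_region_buffer'] below), transitivity yields
     [loc_includes (loc_regions true (Set.singleton (frameOf b))) l]. *)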
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then so are their
/// corresponding sets of memory locations.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then so do their corresponding sets of memory
/// locations.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then so do their corresponding memory locations.
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
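(* Illustrative sketch (added example; [b] is a hypothetical buffer with
   [2 <= length b]): adjacent, non-overlapping sub-buffers are disjoint, e.g.
   the lemma above gives
     [loc_disjoint (loc_buffer (mgsub rel b 0ul 1ul))
                   (loc_buffer (mgsub rel b 1ul 1ul))]
   since [0 + 1 <= 1] discharges the separation hypothesis. *)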
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
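(* Illustrative sketch (added example; [b1], [b2], [b3] are hypothetical
   buffers): a typical precondition written with these helpers,
     [all_live h [buf b1; buf b2; buf b3] /\
      all_disjoint [loc_buffer b1; loc_buffer b2; loc_buffer b3]]
   normalizes at typechecking time to the conjunction of the three [live]
   facts and the three pairwise [loc_disjoint] facts, while
   [loc_union_l [loc_buffer b1; loc_buffer b2]] unfolds to the iterated
   [loc_union] of the list. *)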
(*
* Same as all_disjoint, retaining for backward compatibility
*)
[@@"opaque_to_smt"] | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | l: Prims.list LowStar.Monotonic.Buffer.loc -> Type0 | Prims.Tot | [
"total"
] | [] | [
"Prims.list",
"LowStar.Monotonic.Buffer.loc",
"FStar.BigOps.pairwise_and",
"LowStar.Monotonic.Buffer.loc_disjoint"
] | [] | false | false | false | true | true | let loc_pairwise_disjoint (l: list loc) : Type0 =
| BigOps.pairwise_and loc_disjoint l | false |
Vale.Transformers.BoundedInstructionEffects.fsti | Vale.Transformers.BoundedInstructionEffects.bounded_effects | val bounded_effects (rw: rw_set) (f: st unit) : GTot Type0 | val bounded_effects (rw: rw_set) (f: st unit) : GTot Type0 | let bounded_effects (rw:rw_set) (f:st unit) : GTot Type0 =
(only_affects rw.loc_writes f) /\
(forall s. {:pattern (constant_on_execution rw.loc_constant_writes f s)}
constant_on_execution rw.loc_constant_writes f s) /\
(forall l v. {:pattern (L.mem (|l,v|) rw.loc_constant_writes); (L.mem l rw.loc_writes)}
L.mem (|l,v|) rw.loc_constant_writes ==> L.mem l rw.loc_writes) /\
(
forall s1 s2. {:pattern (run f s1); (run f s2)} (
(s1.ms_ok = s2.ms_ok /\ unchanged_at rw.loc_reads s1 s2) ==> (
((run f s1).ms_ok = (run f s2).ms_ok) /\
((run f s1).ms_ok ==>
unchanged_at rw.loc_writes (run f s1) (run f s2))
)
)
) | {
"file_name": "vale/code/lib/transformers/Vale.Transformers.BoundedInstructionEffects.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 3,
"end_line": 93,
"start_col": 0,
"start_line": 79
} | module Vale.Transformers.BoundedInstructionEffects
open Vale.X64.Bytes_Code_s
open Vale.X64.Machine_s
open Vale.X64.Machine_Semantics_s
open Vale.Def.PossiblyMonad
open Vale.Transformers.Locations
module L = FStar.List.Tot
(** A [location_with_value] contains a location and the value it must hold *)
type location_with_value = l:location_eq & location_val_eqt l
(** A [locations_with_values] contains locations and values they must hold *)
type locations_with_values = list location_with_value
(** An [rw_set] contains information about what locations are read and
written by a stateful operation. *)
type rw_set = {
loc_reads: locations;
loc_writes: locations;
loc_constant_writes: locations_with_values;
}
(** [rw_set_of_ins i] returns the read/write sets for the execution of
an instruction. *)
val rw_set_of_ins : i:ins -> rw_set
(** [locations_of_ocmp o] returns the read set for a comparison operator. *)
val locations_of_ocmp : o:ocmp -> locations
(** [unchanged_except exc s1 s2] means all locations that are disjoint
from the exceptions [exc] have the same value in both [s1] and [s2]. *)
let unchanged_except (exceptions:locations) (s1 s2:machine_state) :
GTot Type0 =
(forall (a:location). {:pattern (eval_location a s2)} (
(!!(disjoint_location_from_locations a exceptions) ==>
(eval_location a s1 == eval_location a s2))
))
(** [only_affects locs f] means that running [f] leaves everything
except [locs] unchanged. *)
let only_affects (locs:locations) (f:st unit) : GTot Type0 =
forall s. {:pattern unchanged_except locs s (run f s)} (
(run f s).ms_ok ==> unchanged_except locs s (run f s)
)
(** [unchanged_at locs s1 s2] means that the value of any location in
[locs] is the same in both [s1] and [s2]. *)
let rec unchanged_at (locs:locations) (s1 s2:machine_state) : GTot Type0 =
match locs with
| [] -> True
| x :: xs -> (
(eval_location x s1 == eval_location x s2) /\
(unchanged_at xs s1 s2)
)
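(* Added note (illustrative unfolding only): on a concrete two-element list the
   recursive definition above reduces to
     [unchanged_at [x; y] s1 s2
        <==> (eval_location x s1 == eval_location x s2) /\
             ((eval_location y s1 == eval_location y s2) /\ True)]. *)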
(** [constant_on_execution locv f s] means that running [f] on [s]
ensures that the values of the locations in [locv] always match
the values given to them in [locv]. *)
let rec constant_on_execution (locv:locations_with_values) (f:st unit) (s:machine_state) : GTot Type0 =
(run f s).ms_ok ==> (
match locv with
| [] -> True
| (|l, v|) :: xs -> (
(eval_location l (run f s) == raise_location_val_eqt v) /\
(constant_on_execution xs f s)
)
)
(** [bounded_effects rw f] means that the execution of [f] is bounded
by the read-write set [rw]. This means that whenever two different
states are the same at the locations in [rw.loc_reads], the
function will have the same effect, and that its effect is bounded
to the set [rw.loc_writes]. Additionally, execution always causes
the resultant state to cause the results to be written as per | {
"checked_file": "/",
"dependencies": [
"Vale.X64.Machine_Semantics_s.fst.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.Bytes_Code_s.fst.checked",
"Vale.Transformers.Locations.fsti.checked",
"Vale.Def.PossiblyMonad.fst.checked",
"prims.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked"
],
"interface_file": false,
"source_file": "Vale.Transformers.BoundedInstructionEffects.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Vale.Transformers.Locations",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.PossiblyMonad",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Print_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_Semantics_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Instructions_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Instruction_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Bytes_Code_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Vale.Transformers.Locations",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.PossiblyMonad",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_Semantics_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Bytes_Code_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Transformers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Transformers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
rw: Vale.Transformers.BoundedInstructionEffects.rw_set ->
f: Vale.X64.Machine_Semantics_s.st Prims.unit
-> Prims.GTot Type0 | Prims.GTot | [
"sometrivial"
] | [] | [
"Vale.Transformers.BoundedInstructionEffects.rw_set",
"Vale.X64.Machine_Semantics_s.st",
"Prims.unit",
"Prims.l_and",
"Vale.Transformers.BoundedInstructionEffects.only_affects",
"Vale.Transformers.BoundedInstructionEffects.__proj__Mkrw_set__item__loc_writes",
"Prims.l_Forall",
"Vale.X64.Machine_Semantics_s.machine_state",
"Vale.Transformers.BoundedInstructionEffects.constant_on_execution",
"Vale.Transformers.BoundedInstructionEffects.__proj__Mkrw_set__item__loc_constant_writes",
"Vale.Transformers.Locations.location",
"Prims.eq2",
"Vale.Transformers.Locations.location_val_t",
"FStar.Universe.raise_t",
"Vale.Transformers.Locations.location_val_eqt",
"Prims.l_imp",
"Prims.b2t",
"FStar.List.Tot.Base.mem",
"Vale.Transformers.BoundedInstructionEffects.location_with_value",
"Prims.Mkdtuple2",
"Vale.Transformers.Locations.location_eq",
"Prims.op_Equality",
"Prims.bool",
"Vale.X64.Machine_Semantics_s.__proj__Mkmachine_state__item__ms_ok",
"Vale.Transformers.BoundedInstructionEffects.unchanged_at",
"Vale.Transformers.BoundedInstructionEffects.__proj__Mkrw_set__item__loc_reads",
"Vale.X64.Machine_Semantics_s.run"
] | [] | false | false | false | false | true | let bounded_effects (rw: rw_set) (f: st unit) : GTot Type0 =
| (only_affects rw.loc_writes f) /\
(forall s. {:pattern (constant_on_execution rw.loc_constant_writes f s)}
constant_on_execution rw.loc_constant_writes f s) /\
(forall l v. {:pattern (L.mem (| l, v |) rw.loc_constant_writes); (L.mem l rw.loc_writes)}
L.mem (| l, v |) rw.loc_constant_writes ==> L.mem l rw.loc_writes) /\
(forall s1 s2. {:pattern (run f s1); (run f s2)}
((s1.ms_ok = s2.ms_ok /\ unchanged_at rw.loc_reads s1 s2) ==>
(((run f s1).ms_ok = (run f s2).ms_ok) /\
((run f s1).ms_ok ==> unchanged_at rw.loc_writes (run f s1) (run f s2))))) | false |
LowStar.Monotonic.Buffer.fsti | LowStar.Monotonic.Buffer.loc_includes_union_l_regions | val loc_includes_union_l_regions (s1 s2: loc) (prf: bool) (r: Set.set HS.rid)
: Lemma (requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))] | val loc_includes_union_l_regions (s1 s2: loc) (prf: bool) (r: Set.set HS.rid)
: Lemma (requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))] | let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r) | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 48,
"end_line": 875,
"start_col": 0,
"start_line": 867
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` have the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
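(* Illustration: with the trivial preorder ``fun _ _ -> True`` used for plain
   buffers (e.g. in LowStar.Buffer), both quantified implications of
   ``compatible_subseq_preorder`` have a trivially true conclusion, so any
   sub-buffer is compatible, at any offset and length. *)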
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder (sub_rel) to the sub-buffer,
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
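(* Illustration: for a buffer ``b`` with ``length b >= 6``, ``mgsub sub_rel b 2ul 4ul``
   is the sub-buffer covering the 4 elements of ``b`` starting at offset 2; by
   ``as_seq_gsub`` below, its contents in a heap ``h`` are
   ``Seq.slice (as_seq h b) 2 6``. *)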
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers having the same region and address have
/// their elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
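(* Illustration: a footprint is typically assembled by unioning the locations an
   operation may touch, e.g. ``loc_union (loc_buffer b1) (loc_buffer b2)`` for an
   operation writing two buffers; by the commutativity, associativity and
   idempotence lemmas above, the order and duplication of operands is irrelevant. *)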
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
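(* Informal reading (not stated by the interface itself): ``loc_addr_of_buffer b``
   covers the whole allocation unit of ``b`` and, since ``preserve_liveness`` is
   ``false``, also its potential deallocation; it is the footprint one would expect
   in the modifies clause of a deallocation, whereas ``loc_buffer b`` only covers
   the contents of ``b``. *)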
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
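(* Informal reading: the ``preserve_liveness`` flag distinguishes footprints that
   only account for updating the contents at an address (``true``) from footprints
   that additionally account for deallocating it (``false``); hence
   ``loc_mreference`` suits ordinary assignments, while ``loc_freed_mreference``
   suits operations that may free the reference. *)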
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
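(* Illustration: ``loc_includes`` is what lets callers weaken footprints; for
   instance, ``loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))``
   holds (see ``loc_includes_gsub_buffer_r'`` below), so an operation whose
   modifies clause mentions a sub-buffer of ``b`` is also bounded by
   ``loc_buffer b``. *)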
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then the same inclusion
/// holds between their corresponding sets of memory locations.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then the same inclusion holds between their
/// corresponding sets of memory locations.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then the same inclusion holds between their corresponding sets of memory locations.
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
s1: LowStar.Monotonic.Buffer.loc ->
s2: LowStar.Monotonic.Buffer.loc ->
prf: Prims.bool ->
r: FStar.Set.set FStar.Monotonic.HyperHeap.rid
-> FStar.Pervasives.Lemma
(requires
LowStar.Monotonic.Buffer.loc_includes s1 (LowStar.Monotonic.Buffer.loc_regions prf r) \/
LowStar.Monotonic.Buffer.loc_includes s2 (LowStar.Monotonic.Buffer.loc_regions prf r))
(ensures
LowStar.Monotonic.Buffer.loc_includes (LowStar.Monotonic.Buffer.loc_union s1 s2)
(LowStar.Monotonic.Buffer.loc_regions prf r))
[
SMTPat (LowStar.Monotonic.Buffer.loc_includes (LowStar.Monotonic.Buffer.loc_union s1 s2)
(LowStar.Monotonic.Buffer.loc_regions prf r))
] | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowStar.Monotonic.Buffer.loc",
"Prims.bool",
"FStar.Set.set",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.loc_includes_union_l",
"LowStar.Monotonic.Buffer.loc_regions",
"Prims.unit",
"Prims.l_or",
"LowStar.Monotonic.Buffer.loc_includes",
"Prims.squash",
"LowStar.Monotonic.Buffer.loc_union",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat",
"Prims.Nil"
] | [] | true | false | true | false | false | let loc_includes_union_l_regions (s1 s2: loc) (prf: bool) (r: Set.set HS.rid)
: Lemma (requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))] =
| loc_includes_union_l s1 s2 (loc_regions prf r) | false |
Vale.Transformers.BoundedInstructionEffects.fsti | Vale.Transformers.BoundedInstructionEffects.constant_on_execution | val constant_on_execution (locv: locations_with_values) (f: st unit) (s: machine_state) : GTot Type0 | val constant_on_execution (locv: locations_with_values) (f: st unit) (s: machine_state) : GTot Type0 | let rec constant_on_execution (locv:locations_with_values) (f:st unit) (s:machine_state) : GTot Type0 =
(run f s).ms_ok ==> (
match locv with
| [] -> True
| (|l, v|) :: xs -> (
(eval_location l (run f s) == raise_location_val_eqt v) /\
(constant_on_execution xs f s)
)
) | {
"file_name": "vale/code/lib/transformers/Vale.Transformers.BoundedInstructionEffects.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 3,
"end_line": 70,
"start_col": 0,
"start_line": 62
} | module Vale.Transformers.BoundedInstructionEffects
open Vale.X64.Bytes_Code_s
open Vale.X64.Machine_s
open Vale.X64.Machine_Semantics_s
open Vale.Def.PossiblyMonad
open Vale.Transformers.Locations
module L = FStar.List.Tot
(** A [location_with_value] contains a location and the value it must hold *)
type location_with_value = l:location_eq & location_val_eqt l
(** A [locations_with_values] contains locations and values they must hold *)
type locations_with_values = list location_with_value
(** An [rw_set] contains information about what locations are read and
written by a stateful operation. *)
type rw_set = {
loc_reads: locations;
loc_writes: locations;
loc_constant_writes: locations_with_values;
}
(** [rw_set_of_ins i] returns the read/write sets for the execution of
an instruction. *)
val rw_set_of_ins : i:ins -> rw_set
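(* Informal example (not from the original file): for a register-to-register move,
   [loc_reads] would contain the source register, [loc_writes] the destination
   register, and [loc_constant_writes] would list any location the instruction
   always leaves holding a statically known value, paired with that value. *)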
(** [locations_of_ocmp o] returns the read set for a comparison operator. *)
val locations_of_ocmp : o:ocmp -> locations
(** [unchanged_except exc s1 s2] means all locations that are disjoint
from the exceptions [exc] have the same value in both [s1] and [s2]. *)
let unchanged_except (exceptions:locations) (s1 s2:machine_state) :
GTot Type0 =
(forall (a:location). {:pattern (eval_location a s2)} (
(!!(disjoint_location_from_locations a exceptions) ==>
(eval_location a s1 == eval_location a s2))
))
(** [only_affects locs f] means that running [f] leaves everything
except [locs] unchanged. *)
let only_affects (locs:locations) (f:st unit) : GTot Type0 =
forall s. {:pattern unchanged_except locs s (run f s)} (
(run f s).ms_ok ==> unchanged_except locs s (run f s)
)
(** [unchanged_at locs s1 s2] means the value of any location in
    [locs] is the same in both [s1] and [s2]. *)
let rec unchanged_at (locs:locations) (s1 s2:machine_state) : GTot Type0 =
match locs with
| [] -> True
| x :: xs -> (
(eval_location x s1 == eval_location x s2) /\
(unchanged_at xs s1 s2)
)
(** [constant_on_execution locv f s] means that running [f] on [s]
ensures that the values of the locations in [locv] always match | {
"checked_file": "/",
"dependencies": [
"Vale.X64.Machine_Semantics_s.fst.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.Bytes_Code_s.fst.checked",
"Vale.Transformers.Locations.fsti.checked",
"Vale.Def.PossiblyMonad.fst.checked",
"prims.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked"
],
"interface_file": false,
"source_file": "Vale.Transformers.BoundedInstructionEffects.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Vale.Transformers.Locations",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.PossiblyMonad",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Print_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_Semantics_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Instructions_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Instruction_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Bytes_Code_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Vale.Transformers.Locations",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.PossiblyMonad",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_Semantics_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Bytes_Code_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Transformers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Transformers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
locv: Vale.Transformers.BoundedInstructionEffects.locations_with_values ->
f: Vale.X64.Machine_Semantics_s.st Prims.unit ->
s: Vale.X64.Machine_Semantics_s.machine_state
-> Prims.GTot Type0 | Prims.GTot | [
"sometrivial"
] | [] | [
"Vale.Transformers.BoundedInstructionEffects.locations_with_values",
"Vale.X64.Machine_Semantics_s.st",
"Prims.unit",
"Vale.X64.Machine_Semantics_s.machine_state",
"Prims.l_imp",
"Prims.b2t",
"Vale.X64.Machine_Semantics_s.__proj__Mkmachine_state__item__ms_ok",
"Vale.X64.Machine_Semantics_s.run",
"Prims.l_True",
"Vale.Transformers.Locations.location_eq",
"Vale.Transformers.Locations.location_val_eqt",
"Prims.list",
"Vale.Transformers.BoundedInstructionEffects.location_with_value",
"Prims.l_and",
"Prims.eq2",
"Vale.Transformers.Locations.location_val_t",
"Vale.Transformers.Locations.eval_location",
"Vale.Transformers.Locations.raise_location_val_eqt",
"Vale.Transformers.BoundedInstructionEffects.constant_on_execution",
"Prims.logical"
] | [
"recursion"
] | false | false | false | false | true | let rec constant_on_execution (locv: locations_with_values) (f: st unit) (s: machine_state)
: GTot Type0 =
| (run f s).ms_ok ==>
(match locv with
| [] -> True
| (| l , v |) :: xs ->
((eval_location l (run f s) == raise_location_val_eqt v) /\ (constant_on_execution xs f s))) | false |
LowStar.Monotonic.Buffer.fsti | LowStar.Monotonic.Buffer.loc_in | val loc_in : l: LowStar.Monotonic.Buffer.loc -> h: FStar.Monotonic.HyperStack.mem -> Type0 | let loc_in (l: loc) (h: HS.mem) =
loc_not_unused_in h `loc_includes` l | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 38,
"end_line": 1563,
"start_col": 0,
"start_line": 1562
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
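(* Illustration: derived modules fix the two preorders; e.g. LowStar.Buffer
   instantiates both ``rrel`` and ``rel`` with the trivial relation
   ``fun _ _ -> True``, while LowStar.ImmutableBuffer uses a preorder that forces
   the contents to remain (sequence-)equal, so they can never change after
   initialization. *)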
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
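(* Illustration (``read0`` is a hypothetical function, not part of this interface):
   ``live`` is the canonical precondition of stateful buffer code, e.g.
     val read0 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
       : HST.Stack a
         (requires (fun h -> live h b /\ length b > 0))
         (ensures (fun h0 x h1 -> h0 == h1 /\ x == get h0 b 0))
   The actual reader provided by this interface is ``index``, not shown in this
   excerpt. *)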
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` have the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder (sub_rel) to the sub-buffer,
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers having the same region and address have
/// their elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then the same inclusion
/// holds between their corresponding sets of memory locations.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then the set of memory locations corresponding
/// to ``s1`` includes the one corresponding to ``s2``.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then the set of memory locations corresponding to ``s1`` includes
/// the one corresponding to ``s2``.
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
* Same as all_disjoint, retaining for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
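/// For illustration only (a sketch, not part of this interface): these
/// list-based helpers are typically used to state preconditions over several
/// buffers at once; the function and buffer names below are hypothetical.
(*
val example (#a:Type0) (#rrel #rel:srel a) (b1 b2 b3:mbuffer a rrel rel)
  : HST.Stack unit
    (requires fun h ->
      all_live h [buf b1; buf b2; buf b3] /\
      loc_pairwise_disjoint [loc_buffer b1; loc_buffer b2; loc_buffer b3])
    (ensures fun _ _ _ -> True)
*)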
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
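/// For illustration only (a sketch, not part of this interface): a typical
/// stateful specification built from ``modifies``. The function name ``copy``
/// is hypothetical, and ``rel`` is assumed to permit the update.
(*
val copy (#a:Type0) (#rrel #rel:srel a) (dst src:mbuffer a rrel rel)
  : HST.Stack unit
    (requires fun h -> live h dst /\ live h src /\ length dst == length src /\
                    loc_disjoint (loc_buffer dst) (loc_buffer src))
    (ensures  fun h0 _ h1 -> modifies (loc_buffer dst) h0 h1 /\
                          as_seq h1 dst == as_seq h0 src)
*)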
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
/// If the memory state does not change, then any set of memory locations
/// is trivially "modified" (in particular, the empty set ``loc_none``).
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x
/// Modifies clauses are transitive. This lemma is the most general
/// one.
val modifies_trans
(s12: loc)
(h1 h2: HS.mem)
(s23: loc)
(h3: HS.mem)
: Lemma
(requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
(ensures (modifies (loc_union s12 s23) h1 h3))
let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
= modifies_trans l h1 h2 l_goal h3
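/// For illustration only (a sketch, not part of this interface): chaining two
/// modifies clauses across an intermediate memory state.
(*
let example_trans (l1 l2:loc) (h0 h1 h2:HS.mem)
  : Lemma (requires (modifies l1 h0 h1 /\ modifies l2 h1 h2))
          (ensures  (modifies (loc_union l1 l2) h0 h2))
  = modifies_trans l1 h0 h1 l2 h2
*)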
/// Regions that are not live can be removed from sets of memory
/// locations that are modified.
val modifies_only_live_regions
(rs: Set.set HS.rid)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_regions false rs) l) h h' /\
(forall r . Set.mem r rs ==> (~ (HS.live_region h r)))
))
(ensures (modifies l h h'))
/// As a consequence, fresh regions can be removed from modifies
/// clauses.
val no_upd_fresh_region: r:HS.rid -> l:loc -> h0:HS.mem -> h1:HS.mem -> Lemma
(requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1))
(ensures (modifies l h0 h1))
[SMTPat (HS.fresh_region r h0 h1); SMTPat (modifies l h0 h1)]
val new_region_modifies (m0: HS.mem) (r0: HS.rid) (col: option int) : Lemma
(requires (HST.is_eternal_region r0 /\ HS.live_region m0 r0 /\ (None? col \/ HS.is_heap_color (Some?.v col))))
(ensures (
let (_, m1) = HS.new_eternal_region m0 r0 col in
modifies loc_none m0 m1
))
[SMTPat (HS.new_eternal_region m0 r0 col)]
/// Stack discipline: any stack frame (and all its transitively
/// extending regions) that is pushed, modified and popped can be
/// removed from a modifies clause.
/// AR: 01/29/2019: Removing the smt pattern from this lemma.
///                Clients are no longer expected to call it explicitly;
///                if you find yourself having to, please raise an issue.
val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
))
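/// For illustration only (a sketch, not part of this interface): the code
/// shape this lemma is about; the body between push and pop is elided.
(*
let f () : HST.Stack unit (requires fun _ -> True)
                          (ensures  fun h0 _ h1 -> modifies loc_none h0 h1)
  = HST.push_frame ();
    // ... allocate and modify buffers in the new stack frame only ...
    HST.pop_frame ()
*)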
/// Compatibility lemmas to rescue modifies clauses specified in the
/// standard F* HyperStack library.
val modifies_loc_regions_intro
(rs: Set.set HS.rid)
(h1 h2: HS.mem)
: Lemma
(requires (HS.modifies rs h1 h2))
(ensures (modifies (loc_regions true rs) h1 h2))
val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2))
/// Modifies clauses for allocating a reference: nothing is
/// modified. (In particular, a modifies clause does not track
/// memory locations that are created.)
val modifies_ralloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(i: HS.rid)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel)
(h' : HS.mem)
: Lemma
(requires (HST.ralloc_post i init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.ralloc_post i init h x h')]
val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')]
/// Modifies clause for freeing a reference: the address is modified.
val modifies_free
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel { HS.is_mm r } )
(m: HS.mem { m `HS.contains` r } )
: Lemma
(modifies (loc_freed_mreference r) m (HS.free r m))
[SMTPat (HS.free r m)]
/// Another compatibility lemma
val modifies_none_modifies
(h1 h2: HS.mem)
: Lemma
(requires (HST.modifies_none h1 h2))
(ensures (modifies loc_none h1 h2))
[SMTPat (HST.modifies_none h1 h2)]
/// Compatibility with HS.upd
val modifies_upd
(#t: Type) (#pre: Preorder.preorder t)
(r: HS.mreference t pre)
(v: t)
(h: HS.mem)
: Lemma
(requires (HS.contains h r))
(ensures (modifies (loc_mreference r) h (HS.upd h r v)))
[SMTPat (HS.upd h r v)]
/// Introduction lemma for modifying loc_buffer_from_to
val modifies_loc_buffer_from_to_intro
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
/// A memory ``h`` does not contain address ``a`` in region ``r``, denoted
/// ``does_not_contain_addr h (r, a)``, only if either region ``r`` is
/// not live, or address ``a`` is unused in region ``r``.
(* BEGIN TODO: move to FStar.Monotonic.HyperStack *)
val does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: GTot Type0
val not_live_region_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (~ (HS.live_region h (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val unused_in_does_not_contain_addr
(h: HS.mem)
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r)))
val addr_unused_in_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val free_does_not_contain_addr
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
HS.is_mm r /\
m `HS.contains` r /\
fst x == HS.frameOf r /\
snd x == HS.as_addr r
))
(ensures (
HS.free r m `does_not_contain_addr` x
))
[SMTPat (HS.free r m `does_not_contain_addr` x)]
val does_not_contain_addr_elim
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
m `does_not_contain_addr` x /\
HS.frameOf r == fst x /\
HS.as_addr r == snd x
))
(ensures (~ (m `HS.contains` r)))
(** END TODO *)
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_live_addresses
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_addresses false r a) l) h h' /\
(forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x))
))
(ensures (modifies l h h'))
(* Generic way to ensure that a buffer just allocated is disjoint from
any other object, however the latter's liveness is defined. *)
val loc_not_unused_in (h: HS.mem) : GTot loc
val loc_unused_in (h: HS.mem) : GTot loc
(* Shortcut notations with more handy names *) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | l: LowStar.Monotonic.Buffer.loc -> h: FStar.Monotonic.HyperStack.mem -> Type0 | Prims.Tot | [
"total"
] | [] | [
"LowStar.Monotonic.Buffer.loc",
"FStar.Monotonic.HyperStack.mem",
"LowStar.Monotonic.Buffer.loc_includes",
"LowStar.Monotonic.Buffer.loc_not_unused_in"
] | [] | false | false | false | true | true | let loc_in (l: loc) (h: HS.mem) =
| (loc_not_unused_in h) `loc_includes` l | false |
|
LowStar.Monotonic.Buffer.fsti | LowStar.Monotonic.Buffer.loc_disjoint_union_r' | val loc_disjoint_union_r' (s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))] | val loc_disjoint_union_r' (s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))] | let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2 | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 74,
"end_line": 930,
"start_col": 0,
"start_line": 921
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
    (rel s (Seq.replace_subseq s i j s2)))  //if we replace the slice [i, j) in s by s2, then s and the resulting sequence are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
 * Most of the time, they will use wrappers such as buffer, immutable buffer, etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
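/// For illustration only (a sketch, not part of this interface): a buffer of
/// nonzero length is never null.
(*
let example_nonnull (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel{length b > 0})
  : Lemma (g_is_null b == false)
  = ()  // expected to follow from length_null_1 via its SMT pattern
*)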
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
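/// For illustration only (a sketch, not part of this interface): ``get`` is
/// definitionally ``Seq.index`` applied to ``as_seq``.
(*
let example_get (#a:Type0) (#rrel #rel:srel a)
  (h:HS.mem) (b:mbuffer a rrel rel{length b > 0})
  : Lemma (get h b 0 == Seq.index (as_seq h b) 0)
  = ()
*)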
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
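/// For illustration only: with in-bounds indices (here assuming ``length b >= 10``),
/// nesting collapses as e.g. ``mgsub rel (mgsub rel b 2ul 8ul) 1ul 4ul == mgsub rel b 3ul 4ul``.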
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers with the same region and address have
/// elements of the same type (and the same initial preorder).
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
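/// For illustration only: together, the laws above make ``loc_union`` an
/// associative, commutative, idempotent operation with unit ``loc_none``,
/// so e.g. ``loc_union l (loc_union loc_none l) == l`` for any ``l``.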
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then the set of memory
/// locations of ``b1`` includes that of ``b2``.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then their corresponding sets of memory locations
/// are related by inclusion as well.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then their corresponding sets of memory locations are related by inclusion as well.
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2')) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
s: LowStar.Monotonic.Buffer.loc ->
s1: LowStar.Monotonic.Buffer.loc ->
s2: LowStar.Monotonic.Buffer.loc
-> FStar.Pervasives.Lemma
(ensures
LowStar.Monotonic.Buffer.loc_disjoint s (LowStar.Monotonic.Buffer.loc_union s1 s2) <==>
LowStar.Monotonic.Buffer.loc_disjoint s s1 /\ LowStar.Monotonic.Buffer.loc_disjoint s s2)
[SMTPat (LowStar.Monotonic.Buffer.loc_disjoint s (LowStar.Monotonic.Buffer.loc_union s1 s2))] | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowStar.Monotonic.Buffer.loc",
"FStar.Classical.move_requires",
"Prims.l_and",
"LowStar.Monotonic.Buffer.loc_includes",
"LowStar.Monotonic.Buffer.loc_union",
"LowStar.Monotonic.Buffer.loc_disjoint",
"LowStar.Monotonic.Buffer.loc_disjoint_includes",
"Prims.unit",
"LowStar.Monotonic.Buffer.loc_includes_union_l",
"LowStar.Monotonic.Buffer.loc_disjoint_union_r",
"Prims.l_True",
"Prims.squash",
"Prims.l_iff",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat",
"Prims.Nil"
] | [] | false | false | true | false | false | let loc_disjoint_union_r' (s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))] =
| Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2 | false |
LowStar.Monotonic.Buffer.fsti | LowStar.Monotonic.Buffer.loc_includes_union_l_buffer | val loc_includes_union_l_buffer
(s1 s2: loc)
(#a: Type0)
(#rrel #rel: srel a)
(b: mbuffer a rrel rel)
: Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))] | val loc_includes_union_l_buffer
(s1 s2: loc)
(#a: Type0)
(#rrel #rel: srel a)
(b: mbuffer a rrel rel)
: Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))] | let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b) | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 45,
"end_line": 854,
"start_col": 0,
"start_line": 847
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
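(* Editor's illustrative sketch (not part of the original interface): concrete
   buffer types are obtained by instantiating the two preorders. For instance,
   a buffer whose contents may evolve arbitrarily uses the trivial preorder on
   both indices, mirroring how the LowStar.Buffer wrapper is defined:
     let trivial_preorder (a:Type0) : srel a = fun _ _ -> True
     let example_plain_buffer (a:Type0) = mbuffer a (trivial_preorder a) (trivial_preorder a)
   The names above are hypothetical helpers for illustration only. *)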
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq h b``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
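(* Editor's illustrative sketch (not part of the original interface): since
   ``get`` is merely sequence indexing, facts about ``as_seq`` transfer to it
   directly; e.g. the following hypothetical lemma holds by definition:
     let example_get_is_index (#a:Type0) (#rrel #rel:srel a)
       (h:HS.mem) (b:mbuffer a rrel rel) (i:nat{i < length b})
       : Lemma (get h b i == Seq.index (as_seq h b) i)
       = ()
*)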
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible.
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
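(* Editor's illustrative sketch (not part of the original interface): carving
   out the first ``len`` cells of a buffer while keeping its current preorder
   only requires the length bound demanded by ``mgsub``:
     let example_gprefix (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (len:U32.t)
       : Ghost (mbuffer a rrel rel)
         (requires (U32.v len <= length b))
         (ensures (fun _ -> True))
       = mgsub rel b 0ul len
   The name ``example_gprefix`` is hypothetical, for illustration only. *)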
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers having the same region and address have
/// elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
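(* Editor's illustrative sketch (not part of the original interface): with
   commutativity, associativity and idempotence, duplicate occurrences inside a
   footprint collapse; for example:
     let example_union_collapse (l1 l2: loc)
       : Lemma (loc_union l1 (loc_union l2 l1) == loc_union l1 l2)
       = loc_union_comm l2 l1;
         loc_union_idem_1 l1 l2
   The name ``example_union_collapse`` is hypothetical, for illustration only. *)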
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
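(* Editor's illustrative sketch (not part of the original interface): the
   footprint of touching the single cell ``i`` of ``b`` can be written
   ``loc_buffer_from_to b i (i `U32.add` 1ul)``; it is always included in the
   whole-buffer footprint via ``loc_includes_loc_buffer_loc_buffer_from_to``
   (declared further below):
     let example_cell_footprint (#a:Type0) (#rrel #rel:srel a)
       (b:mbuffer a rrel rel) (i:U32.t{U32.v i < length b})
       : Lemma (loc_includes (loc_buffer b) (loc_buffer_from_to b i (i `U32.add` 1ul)))
       = loc_includes_loc_buffer_loc_buffer_from_to b i (i `U32.add` 1ul)
   The name ``example_cell_footprint`` is hypothetical, for illustration only. *)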
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then their corresponding sets
/// of memory locations are related by inclusion as well.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then their corresponding sets of memory locations
/// are related by inclusion as well.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then their corresponding sets of memory locations are related by inclusion as well.
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
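(* Editor's illustrative sketch (not part of the original interface):
   instantiating ``loc_includes_addresses_buffer`` at a buffer's own region and
   address shows that ``loc_addr_of_buffer b`` covers ``loc_buffer b``:
     let example_addr_covers_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
       : Lemma (loc_includes (loc_addr_of_buffer b) (loc_buffer b))
       = loc_includes_addresses_buffer false (frameOf b) (Set.singleton (as_addr b)) b
   The name ``example_addr_covers_buffer`` is hypothetical, for illustration only. *)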
/// Patterns with loc_includes, union on the left | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
s1: LowStar.Monotonic.Buffer.loc ->
s2: LowStar.Monotonic.Buffer.loc ->
b: LowStar.Monotonic.Buffer.mbuffer a rrel rel
-> FStar.Pervasives.Lemma
(requires
LowStar.Monotonic.Buffer.loc_includes s1 (LowStar.Monotonic.Buffer.loc_buffer b) \/
LowStar.Monotonic.Buffer.loc_includes s2 (LowStar.Monotonic.Buffer.loc_buffer b))
(ensures
LowStar.Monotonic.Buffer.loc_includes (LowStar.Monotonic.Buffer.loc_union s1 s2)
(LowStar.Monotonic.Buffer.loc_buffer b))
[
SMTPat (LowStar.Monotonic.Buffer.loc_includes (LowStar.Monotonic.Buffer.loc_union s1 s2)
(LowStar.Monotonic.Buffer.loc_buffer b))
] | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowStar.Monotonic.Buffer.loc",
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Monotonic.Buffer.loc_includes_union_l",
"LowStar.Monotonic.Buffer.loc_buffer",
"Prims.unit",
"Prims.l_or",
"LowStar.Monotonic.Buffer.loc_includes",
"Prims.squash",
"LowStar.Monotonic.Buffer.loc_union",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat",
"Prims.Nil"
] | [] | true | false | true | false | false | let loc_includes_union_l_buffer
(s1 s2: loc)
(#a: Type0)
(#rrel #rel: srel a)
(b: mbuffer a rrel rel)
: Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))] =
| loc_includes_union_l s1 s2 (loc_buffer b) | false |
LowStar.Monotonic.Buffer.fsti | LowStar.Monotonic.Buffer.loc_not_in | val loc_not_in : l: LowStar.Monotonic.Buffer.loc -> h: FStar.Monotonic.HyperStack.mem -> Type0 | let loc_not_in (l: loc) (h: HS.mem) =
loc_unused_in h `loc_includes` l | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 34,
"end_line": 1566,
"start_col": 0,
"start_line": 1565
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
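(* Illustrative sketch, not part of the original interface (the name
   `first_cell` is hypothetical): ``get`` merely indexes the reflected
   sequence, so specifications can use either form interchangeably.
   let first_cell (#a:Type0) (#rrel #rel:srel a)
     (h:HS.mem) (b:mbuffer a rrel rel{length b > 0}) : GTot a
   = get h b 0  // same value as Seq.index (as_seq h b) 0
*)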
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible with the preorder of the parent buffer
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
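(* Illustrative sketch, not part of the original interface (the name
   `second_half` and the even-length refinement are hypothetical): carving
   the second half of a buffer while keeping its preorder.
   let second_half (#a:Type0) (#rrel #rel:srel a)
     (b:mbuffer a rrel rel) (half:U32.t{U32.v half + U32.v half == length b})
   : GTot (mbuffer a rrel rel)
   = mgsub rel b half half
*)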
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers having the same region and address have
/// their elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
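(* Illustrative sketch, not part of the original interface (the name
   `loc_two_buffers` is hypothetical): the footprint of an operation that
   touches two buffers is typically the union of their locations.
   let loc_two_buffers (#a:Type0) (#rrel #rel:srel a)
     (b1 b2:mbuffer a rrel rel) : GTot loc
   = loc_union (loc_buffer b1) (loc_buffer b2)
*)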
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
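(* Illustrative note, not part of the original interface: a freshly pushed
   stack frame is typically framed with the location
   ``loc_all_regions_from false (HS.get_tip h1)``, as in the statement of
   ``modifies_fresh_frame_popped`` further below. *)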
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
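(* Illustrative sketch, not part of the original interface (the name
   `union_includes_left` is hypothetical): a union includes each of its
   operands, by reflexivity and ``loc_includes_union_l``.
   let union_includes_left (s1 s2:loc)
   : Lemma (loc_includes (loc_union s1 s2) s1)
   = loc_includes_refl s1; loc_includes_union_l s1 s2 s1
*)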
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then their corresponding
/// sets of memory locations are related by ``loc_includes`` as well.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then so are their corresponding sets of memory
/// locations.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then so are their corresponding memory locations
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
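(* Illustrative sketch, not part of the original interface (the name
   `halves_disjoint` and the even-length refinement are hypothetical):
   splitting a buffer at an index yields two sub-buffers with disjoint
   locations, by ``loc_disjoint_gsub_buffer``.
   let halves_disjoint (#a:Type0) (#rrel #rel:srel a)
     (b:mbuffer a rrel rel) (half:U32.t{U32.v half + U32.v half == length b})
   : Lemma (loc_disjoint (loc_buffer (mgsub rel b 0ul half))
                         (loc_buffer (mgsub rel b half half)))
   = ()  // expected to follow from the SMT pattern on loc_disjoint_gsub_buffer
*)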
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
 * Same as all_disjoint, retained for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
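(* Illustrative sketch, not part of the original interface: with three
   buffers b1, b2 and b3 in scope, a typical precondition and footprint read
     all_disjoint [loc_buffer b1; loc_buffer b2; loc_buffer b3]
     loc_union_l [loc_buffer b1; loc_buffer b2; loc_buffer b3]
   both of which reduce at typechecking time. *)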
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if,
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
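(* Illustrative sketch, not part of the original interface (the name
   `frame_disjoint_buffer` is hypothetical): writing within the footprint of
   b1 preserves a disjoint, live buffer b2.
   let frame_disjoint_buffer (#a:Type0) (#rrel #rel:srel a)
     (b1 b2:mbuffer a rrel rel) (h h':HS.mem)
   : Lemma (requires (live h b2 /\
                      loc_disjoint (loc_buffer b1) (loc_buffer b2) /\
                      modifies (loc_buffer b1) h h'))
           (ensures  (live h' b2 /\ as_seq h b2 == as_seq h' b2))
   = modifies_buffer_elim b2 (loc_buffer b1) h h'
*)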
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
/// If the memory state does not change, then any set of memory locations
/// is a valid modifies footprint (in particular, the empty set ``loc_none``).
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x
/// Modifies clauses are transitive. This lemma is the most general
/// one.
val modifies_trans
(s12: loc)
(h1 h2: HS.mem)
(s23: loc)
(h3: HS.mem)
: Lemma
(requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
(ensures (modifies (loc_union s12 s23) h1 h3))
let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
= modifies_trans l h1 h2 l_goal h3
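(* Illustrative sketch, not part of the original interface (the name
   `modifies_chain` is hypothetical): two successive updates to the same
   footprint compose, using ``modifies_trans`` and ``loc_union_idem``.
   let modifies_chain (l:loc) (h1 h2 h3:HS.mem)
   : Lemma (requires (modifies l h1 h2 /\ modifies l h2 h3))
           (ensures  (modifies l h1 h3))
   = modifies_trans l h1 h2 l h3  // gives modifies (loc_union l l) h1 h3,
                                  // rewritten to l by loc_union_idem
*)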
/// Regions that are not live can be removed from sets of memory
/// locations that are modified.
val modifies_only_live_regions
(rs: Set.set HS.rid)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_regions false rs) l) h h' /\
(forall r . Set.mem r rs ==> (~ (HS.live_region h r)))
))
(ensures (modifies l h h'))
/// As a consequence, fresh regions can be removed from modifies
/// clauses.
val no_upd_fresh_region: r:HS.rid -> l:loc -> h0:HS.mem -> h1:HS.mem -> Lemma
(requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1))
(ensures (modifies l h0 h1))
[SMTPat (HS.fresh_region r h0 h1); SMTPat (modifies l h0 h1)]
val new_region_modifies (m0: HS.mem) (r0: HS.rid) (col: option int) : Lemma
(requires (HST.is_eternal_region r0 /\ HS.live_region m0 r0 /\ (None? col \/ HS.is_heap_color (Some?.v col))))
(ensures (
let (_, m1) = HS.new_eternal_region m0 r0 col in
modifies loc_none m0 m1
))
[SMTPat (HS.new_eternal_region m0 r0 col)]
/// Stack discipline: any stack frame (and all its transitively
/// extending regions) that is pushed, modified and popped can be
/// removed from a modifies clause.
/// AR: 01/29/2019: Removing the smt pattern from this lemma.
/// Clients are no longer expected to call it explicitly;
/// if you find yourself having to, please raise an issue.
val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
))
/// Compatibility lemmas to rescue modifies clauses specified in the
/// standard F* HyperStack library.
val modifies_loc_regions_intro
(rs: Set.set HS.rid)
(h1 h2: HS.mem)
: Lemma
(requires (HS.modifies rs h1 h2))
(ensures (modifies (loc_regions true rs) h1 h2))
val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2))
/// Modifies clauses for allocating a reference: nothing is
/// modified. (In particular, a modifies clause does not track
/// memory locations that are created.)
val modifies_ralloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(i: HS.rid)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel)
(h' : HS.mem)
: Lemma
(requires (HST.ralloc_post i init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.ralloc_post i init h x h')]
val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')]
/// Modifies clause for freeing a reference: the address is modified.
val modifies_free
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel { HS.is_mm r } )
(m: HS.mem { m `HS.contains` r } )
: Lemma
(modifies (loc_freed_mreference r) m (HS.free r m))
[SMTPat (HS.free r m)]
/// Another compatibility lemma
val modifies_none_modifies
(h1 h2: HS.mem)
: Lemma
(requires (HST.modifies_none h1 h2))
(ensures (modifies loc_none h1 h2))
[SMTPat (HST.modifies_none h1 h2)]
/// Compatibility with HS.upd
val modifies_upd
(#t: Type) (#pre: Preorder.preorder t)
(r: HS.mreference t pre)
(v: t)
(h: HS.mem)
: Lemma
(requires (HS.contains h r))
(ensures (modifies (loc_mreference r) h (HS.upd h r v)))
[SMTPat (HS.upd h r v)]
/// Introduction lemma for modifying loc_buffer_from_to
val modifies_loc_buffer_from_to_intro
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
/// A memory ``h`` does not contain address ``a`` in region ``r``, denoted
/// ``does_not_contain_addr h (r, a)``, only if either region ``r`` is
/// not live, or address ``a`` is unused in region ``r``.
(* BEGIN TODO: move to FStar.Monotonic.HyperStack *)
val does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: GTot Type0
val not_live_region_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (~ (HS.live_region h (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val unused_in_does_not_contain_addr
(h: HS.mem)
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r)))
val addr_unused_in_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val free_does_not_contain_addr
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
HS.is_mm r /\
m `HS.contains` r /\
fst x == HS.frameOf r /\
snd x == HS.as_addr r
))
(ensures (
HS.free r m `does_not_contain_addr` x
))
[SMTPat (HS.free r m `does_not_contain_addr` x)]
val does_not_contain_addr_elim
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
m `does_not_contain_addr` x /\
HS.frameOf r == fst x /\
HS.as_addr r == snd x
))
(ensures (~ (m `HS.contains` r)))
(** END TODO *)
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_live_addresses
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_addresses false r a) l) h h' /\
(forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x))
))
(ensures (modifies l h h'))
(* Generic way to ensure that a freshly allocated buffer is disjoint from
   any other object, regardless of how the latter's liveness is defined. *)
val loc_not_unused_in (h: HS.mem) : GTot loc
val loc_unused_in (h: HS.mem) : GTot loc
(* Shortcut notations with more handy names *)
let loc_in (l: loc) (h: HS.mem) =
loc_not_unused_in h `loc_includes` l | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | l: LowStar.Monotonic.Buffer.loc -> h: FStar.Monotonic.HyperStack.mem -> Type0 | Prims.Tot | [
"total"
] | [] | [
"LowStar.Monotonic.Buffer.loc",
"FStar.Monotonic.HyperStack.mem",
"LowStar.Monotonic.Buffer.loc_includes",
"LowStar.Monotonic.Buffer.loc_unused_in"
] | [] | false | false | false | true | true | let loc_not_in (l: loc) (h: HS.mem) =
| (loc_unused_in h) `loc_includes` l | false |
|
LowStar.Monotonic.Buffer.fsti | LowStar.Monotonic.Buffer.fresh_loc | val fresh_loc (l: loc) (h h': HS.mem) : GTot Type0 | val fresh_loc (l: loc) (h h': HS.mem) : GTot Type0 | let fresh_loc (l: loc) (h h' : HS.mem) : GTot Type0 =
loc_unused_in h `loc_includes` l /\
loc_not_unused_in h' `loc_includes` l | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 39,
"end_line": 1676,
"start_col": 0,
"start_line": 1674
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
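(* Illustrative sketch, not part of this interface: the plain buffer type of
   LowStar.Buffer is obtained by instantiating both preorders with a trivial,
   always-true relation, along the lines of
   let trivial_preorder (a:Type0) : srel a = fun _ _ -> True
   let buffer (a:Type0) = mbuffer a (trivial_preorder a) (trivial_preorder a)
*)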
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining the sub-buffer-related API, we need to define the notion of "compatibility".
///
/// Sub-buffers can be taken at a different preorder than their parent buffers,
/// but we need to ensure that changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder to the sub-buffer (``sub_rel``),
/// provided it is compatible.
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
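(* A usage sketch (hypothetical, not part of this verified interface): carving
   the middle third out of a buffer of length 9 while keeping the parent's
   preorder. `mgsub` itself only requires the bounds to fit; `compatible_sub`
   is what the lemmas below ask for when relating the sub-buffer to its parent.

     let middle_third (#a:Type0) (#rrel #rel:srel a)
       (b:mbuffer a rrel rel { length b == 9 })
       : GTot (mbuffer a rrel rel)
       = mgsub rel b 3ul 3ul
*)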
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
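(* Worked instance (sketch): for a sub-buffer `mgsub rel b 3ul 3ul` of a
   buffer `b` of length 9, the lemma `as_seq_gsub` above gives

     as_seq h (mgsub rel b 3ul 3ul) == Seq.slice (as_seq h b) 3 6

   so reading the sub-buffer at index 0 yields element 3 of `b`. *)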
/// Two live non-null buffers having the same region and address have
/// their elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
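(* A small example (sketch, not part of this interface; the name is
   illustrative only) of how the monoid laws are used: redundant unions that
   arise when composing specifications collapse by the laws above.

     let collapse_example (l:loc)
       : Lemma (loc_union l (loc_union loc_none l) == l)
       = loc_union_loc_none_l l;  (* loc_union loc_none l == l *)
         loc_union_idem l         (* loc_union l l == l *)
*)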
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then the set of memory
/// locations corresponding to ``b1`` includes the one corresponding to ``b2``.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in the region of ``b``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then the set of memory locations corresponding to
/// ``s1`` includes the one corresponding to ``s2``.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``, then
/// the set of memory locations corresponding to ``s1`` includes the one
/// corresponding to ``s2``.
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
 * Same as all_disjoint, retained for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
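(* A typical specification sketch (hypothetical client code) using the
   list-based helpers above, all reduced at typechecking time; `b1`, `b2`,
   `b3` stand for buffers bound in the client's context, and `modifies` is
   declared just below:

     requires fun h ->
       all_live h [buf b1; buf b2; buf b3] /\
       all_disjoint [loc_buffer b1; loc_buffer b2; loc_buffer b3]
     ensures fun h0 _ h1 ->
       modifies (loc_union_l [loc_buffer b1; loc_buffer b2; loc_buffer b3]) h0 h1
*)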
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
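(* Framing sketch (hypothetical client code, not part of this interface): a
   caller holding two disjoint buffers concludes that writing through `b1`
   preserves `b2`, because `modifies_buffer_elim` fires on the
   `modifies`/`live` patterns. Assuming some function

     val write_first (#a:Type0) (#rrel #rel:srel a)
       (b:mbuffer a rrel rel{0 < length b}) (v:a)
       : HST.Stack unit
         (requires fun h -> live h b)
         (ensures  fun h0 _ h1 -> modifies (loc_buffer b) h0 h1)

   then after `write_first b1 v`, any `b2` satisfying
   `loc_disjoint (loc_buffer b1) (loc_buffer b2)` that was live before the
   call is still live, and `as_seq h' b2 == as_seq h b2`. *)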
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
/// If the memory state does not change, then any set of memory locations
/// is (trivially) modified, in particular the empty set ``loc_none``.
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
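(* Weakening sketch (the name is illustrative only): a `modifies` clause on a
   buffer can always be restated against a larger footprint, e.g. the buffer's
   whole region, using `modifies_loc_includes` together with the inclusion
   lemmas above.

     let weaken_to_region (#a:Type0) (#rrel #rel:srel a)
       (b:mbuffer a rrel rel) (h0 h1:HS.mem)
       : Lemma (requires (modifies (loc_buffer b) h0 h1))
               (ensures  (modifies (loc_regions true (Set.singleton (frameOf b))) h0 h1))
       = modifies_loc_includes (loc_regions true (Set.singleton (frameOf b))) h0 h1 (loc_buffer b)
*)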
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x
/// Modifies clauses are transitive. This lemma is the most general
/// one.
val modifies_trans
(s12: loc)
(h1 h2: HS.mem)
(s23: loc)
(h3: HS.mem)
: Lemma
(requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
(ensures (modifies (loc_union s12 s23) h1 h3))
let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
= modifies_trans l h1 h2 l_goal h3
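(* Chaining sketch (illustrative only): two successive steps that modify
   different locations compose into a single clause on their union.

     let chain_example (l1 l2:loc) (h1 h2 h3:HS.mem)
       : Lemma (requires (modifies l1 h1 h2 /\ modifies l2 h2 h3))
               (ensures  (modifies (loc_union l1 l2) h1 h3))
       = modifies_trans l1 h1 h2 l2 h3
*)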
/// Regions that are not live can be removed from sets of memory
/// locations that are modified.
val modifies_only_live_regions
(rs: Set.set HS.rid)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_regions false rs) l) h h' /\
(forall r . Set.mem r rs ==> (~ (HS.live_region h r)))
))
(ensures (modifies l h h'))
/// As a consequence, fresh regions can be removed from modifies
/// clauses.
val no_upd_fresh_region: r:HS.rid -> l:loc -> h0:HS.mem -> h1:HS.mem -> Lemma
(requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1))
(ensures (modifies l h0 h1))
[SMTPat (HS.fresh_region r h0 h1); SMTPat (modifies l h0 h1)]
val new_region_modifies (m0: HS.mem) (r0: HS.rid) (col: option int) : Lemma
(requires (HST.is_eternal_region r0 /\ HS.live_region m0 r0 /\ (None? col \/ HS.is_heap_color (Some?.v col))))
(ensures (
let (_, m1) = HS.new_eternal_region m0 r0 col in
modifies loc_none m0 m1
))
[SMTPat (HS.new_eternal_region m0 r0 col)]
/// Stack discipline: any stack frame (and all its transitively
/// extending regions) that is pushed, modified and popped can be
/// removed from a modifies clause.
/// AR: 01/29/2019: Removing the smt pattern from this lemma.
/// Clients are no longer expected to call it explicitly;
/// if you find yourself having to, please raise an issue.
val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
))
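(* Stack-discipline sketch (hypothetical client code): the canonical push/pop
   pattern. A function that pushes a frame, works only on data allocated in
   that frame, and pops it can advertise `modifies loc_none`; the lemma above,
   together with `no_upd_fresh_region`, is what makes this go through.

     let with_frame () : HST.Stack unit
       (requires fun _ -> True)
       (ensures  fun h0 _ h1 -> modifies loc_none h0 h1)
       = HST.push_frame ();
         (* ... allocate and mutate stack buffers here ... *)
         HST.pop_frame ()
*)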
/// Compatibility lemmas to rescue modifies clauses specified in the
/// standard F* HyperStack library.
val modifies_loc_regions_intro
(rs: Set.set HS.rid)
(h1 h2: HS.mem)
: Lemma
(requires (HS.modifies rs h1 h2))
(ensures (modifies (loc_regions true rs) h1 h2))
val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2))
/// Modifies clauses for allocating a reference: nothing is
/// modified. (In particular, a modifies clause does not track
/// memory locations that are created.)
val modifies_ralloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(i: HS.rid)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel)
(h' : HS.mem)
: Lemma
(requires (HST.ralloc_post i init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.ralloc_post i init h x h')]
val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')]
/// Modifies clause for freeing a reference: the address is modified.
val modifies_free
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel { HS.is_mm r } )
(m: HS.mem { m `HS.contains` r } )
: Lemma
(modifies (loc_freed_mreference r) m (HS.free r m))
[SMTPat (HS.free r m)]
/// Another compatibility lemma
val modifies_none_modifies
(h1 h2: HS.mem)
: Lemma
(requires (HST.modifies_none h1 h2))
(ensures (modifies loc_none h1 h2))
[SMTPat (HST.modifies_none h1 h2)]
/// Compatibility with HS.upd
val modifies_upd
(#t: Type) (#pre: Preorder.preorder t)
(r: HS.mreference t pre)
(v: t)
(h: HS.mem)
: Lemma
(requires (HS.contains h r))
(ensures (modifies (loc_mreference r) h (HS.upd h r v)))
[SMTPat (HS.upd h r v)]
/// Introduction lemma for modifying loc_buffer_from_to
val modifies_loc_buffer_from_to_intro
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
/// A memory ``h`` does not contain address ``a`` in region ``r``, denoted
/// ``does_not_contain_addr h (r, a)``, only if either region ``r`` is
/// not live, or address ``a`` is unused in region ``r``.
(* BEGIN TODO: move to FStar.Monotonic.HyperStack *)
val does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: GTot Type0
val not_live_region_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (~ (HS.live_region h (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val unused_in_does_not_contain_addr
(h: HS.mem)
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r)))
val addr_unused_in_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val free_does_not_contain_addr
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
HS.is_mm r /\
m `HS.contains` r /\
fst x == HS.frameOf r /\
snd x == HS.as_addr r
))
(ensures (
HS.free r m `does_not_contain_addr` x
))
[SMTPat (HS.free r m `does_not_contain_addr` x)]
val does_not_contain_addr_elim
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
m `does_not_contain_addr` x /\
HS.frameOf r == fst x /\
HS.as_addr r == snd x
))
(ensures (~ (m `HS.contains` r)))
(** END TODO *)
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_live_addresses
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_addresses false r a) l) h h' /\
(forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x))
))
(ensures (modifies l h h'))
(* Generic way to ensure that a freshly allocated buffer is disjoint from
   any other object, however the latter's liveness is defined. *)
val loc_not_unused_in (h: HS.mem) : GTot loc
val loc_unused_in (h: HS.mem) : GTot loc
(* Shortcut notations with more handy names *)
let loc_in (l: loc) (h: HS.mem) =
loc_not_unused_in h `loc_includes` l
let loc_not_in (l: loc) (h: HS.mem) =
loc_unused_in h `loc_includes` l
val loc_regions_unused_in (h: HS.mem) (rs: Set.set HS.rid) : Lemma
(requires (forall r . Set.mem r rs ==> (~ (HS.live_region h r))))
(ensures (loc_unused_in h `loc_includes` loc_regions false rs))
val loc_unused_in_not_unused_in_disjoint (h: HS.mem) : Lemma
(loc_disjoint (loc_unused_in h) (loc_not_unused_in h))
val not_live_region_loc_not_unused_in_disjoint
(h0: HS.mem)
(r: HS.rid)
: Lemma
(requires (~ (HS.live_region h0 r)))
(ensures (loc_disjoint (loc_region_only false r) (loc_not_unused_in h0)))
let fresh_frame_loc_not_unused_in_disjoint
(h0 h1: HS.mem)
: Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)]
= not_live_region_loc_not_unused_in_disjoint h0 (HS.get_tip h1)
val live_loc_not_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b))
(ensures (loc_not_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (live h b)]
val unused_in_loc_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (unused_in b h))
(ensures (loc_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (unused_in b h)]
val modifies_address_liveness_insensitive_unused_in
(h h' : HS.mem)
: Lemma
(requires (modifies (address_liveness_insensitive_locs) h h'))
(ensures (loc_not_unused_in h' `loc_includes` loc_not_unused_in h /\ loc_unused_in h `loc_includes` loc_unused_in h'))
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_not_unused_in
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (modifies (loc_union (loc_unused_in h) l) h h'))
(ensures (modifies l h h'))
val mreference_live_loc_not_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (h `HS.contains` r))
(ensures (loc_not_unused_in h `loc_includes` loc_freed_mreference r /\ loc_not_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.contains h r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_freed_mreference r)];
]]
val mreference_unused_in_loc_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (loc_unused_in h `loc_includes` loc_freed_mreference r /\ loc_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.unused_in r h)];
[SMTPat (loc_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_unused_in h `loc_includes` loc_freed_mreference r)];
]]
let unused_in_not_unused_in_disjoint_2
(l1 l2 l1' l2': loc)
(h: HS.mem)
: Lemma
(requires (loc_unused_in h `loc_includes` l1 /\ loc_not_unused_in h `loc_includes` l2 /\ l1 `loc_includes` l1' /\ l2 `loc_includes` l2' ))
(ensures (loc_disjoint l1' l2' ))
[SMTPat (loc_disjoint l1' l2'); SMTPat (loc_unused_in h `loc_includes` l1); SMTPat (loc_not_unused_in h `loc_includes` l2)]
= loc_includes_trans (loc_unused_in h) l1 l1' ;
loc_includes_trans (loc_not_unused_in h) l2 l2' ;
loc_unused_in_not_unused_in_disjoint h ;
loc_disjoint_includes (loc_unused_in h) (loc_not_unused_in h) l1' l2'
val modifies_loc_unused_in
(l: loc)
(h1 h2: HS.mem)
(l' : loc)
: Lemma
(requires (
modifies l h1 h2 /\
address_liveness_insensitive_locs `loc_includes` l /\
loc_unused_in h2 `loc_includes` l'
))
(ensures (loc_unused_in h1 `loc_includes` l'))
[SMTPatOr [
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h2 `loc_includes` l')];
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h1 `loc_includes` l')];
]]
/// Shorthand: freshness
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
l: LowStar.Monotonic.Buffer.loc ->
h: FStar.Monotonic.HyperStack.mem ->
h': FStar.Monotonic.HyperStack.mem
-> Prims.GTot Type0 | Prims.GTot | [
"sometrivial"
] | [] | [
"LowStar.Monotonic.Buffer.loc",
"FStar.Monotonic.HyperStack.mem",
"Prims.l_and",
"LowStar.Monotonic.Buffer.loc_includes",
"LowStar.Monotonic.Buffer.loc_unused_in",
"LowStar.Monotonic.Buffer.loc_not_unused_in"
let fresh_loc (l: loc) (h h': HS.mem) : GTot Type0 =
  (loc_unused_in h) `loc_includes` l /\
  (loc_not_unused_in h') `loc_includes` l
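(* Usage sketch (illustrative only): `fresh_loc (loc_buffer b) h0 h1` is the
   canonical postcondition of allocation. Anything already allocated in `h0`
   is then disjoint from the fresh location.

     let fresh_is_disjoint (b_loc old_loc:loc) (h0 h1:HS.mem)
       : Lemma (requires (fresh_loc b_loc h0 h1 /\ old_loc `loc_in` h0))
               (ensures  (loc_disjoint b_loc old_loc))
       = unused_in_not_unused_in_disjoint_2 b_loc old_loc b_loc old_loc h0
*)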
LowStar.Monotonic.Buffer.fsti | LowStar.Monotonic.Buffer.loc_includes_union_l_addresses | val loc_includes_union_l_addresses (s1 s2: loc) (prf: bool) (r: HS.rid) (a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a))
)
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))] | val loc_includes_union_l_addresses (s1 s2: loc) (prf: bool) (r: HS.rid) (a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a))
)
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))] | let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a) | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 52,
"end_line": 865,
"start_col": 0,
"start_line": 856
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
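(* Typical use: `live h b` is a precondition of every stateful operation on
   `b`; e.g. an illustrative (hypothetical) signature of a function that only
   reads the memory:
     val probe (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
       : HST.Stack unit (requires fun h -> live h b)
                        (ensures  fun h0 _ h1 -> h0 == h1)
*)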
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
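(* For example, a ghost predicate stating that a two-element buffer holds
   equal values in heap `h` (illustrative only):
     let both_equal (#a:Type0) (#rrel #rel:srel a)
       (h:HS.mem) (b:mbuffer a rrel rel{length b == 2}) : GTot Type0
       = get h b 0 == get h b 1
   which unfolds to `Seq.index (as_seq h b) 0 == Seq.index (as_seq h b) 1`. *)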
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder (sub_rel) to the sub-buffer,
/// provided it is compatible with the parent buffer's preorder (see ``compatible_sub``)
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
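(* E.g., the ghost sub-buffer covering elements [2, 7) of a buffer with at
   least 7 elements, keeping the parent's preorder (illustrative sketch):
     let middle (#a:Type0) (#rrel #rel:srel a)
       (b:mbuffer a rrel rel{7 <= length b})
       : GTot (mbuffer a rrel rel)
       = mgsub rel b 2ul 5ul
*)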
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
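(* Concretely, for a buffer [b] with at least 12 elements:
     mgsub rel (mgsub rel b 2ul 10ul) 3ul 2ul == mgsub rel b 5ul 2ul
   i.e. elements [3, 5) of the slice [2, 12) are elements [5, 7) of [b]. *)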
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers with the same region and address have
/// elements of the same type (and the same initial preorder ``rrel``).
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
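(* A small consequence of these laws (sketch):
     let loc_union_none_idem (l:loc)
       : Lemma (loc_union l (loc_union loc_none l) == l)
       = loc_union_loc_none_l l; loc_union_idem l
*)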
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
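(* E.g., a footprint covering two buffers includes each of them (sketch):
     let union_includes_fst (#a:Type0) (#rrel #rel:srel a)
       (b1 b2:mbuffer a rrel rel)
       : Lemma (loc_includes (loc_union (loc_buffer b1) (loc_buffer b2))
                             (loc_buffer b1))
       = loc_includes_refl (loc_buffer b1);
         loc_includes_union_l (loc_buffer b1) (loc_buffer b2) (loc_buffer b1)
*)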
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then the set of memory
/// locations of ``b1`` includes that of ``b2``.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then the set of memory locations corresponding to
/// ``s1`` includes the set corresponding to ``s2``.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then the set of memory locations corresponding to ``s1`` includes
/// the set corresponding to ``s2``.
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
s1: LowStar.Monotonic.Buffer.loc ->
s2: LowStar.Monotonic.Buffer.loc ->
prf: Prims.bool ->
r: FStar.Monotonic.HyperHeap.rid ->
a: FStar.Set.set Prims.nat
-> FStar.Pervasives.Lemma
(requires
LowStar.Monotonic.Buffer.loc_includes s1 (LowStar.Monotonic.Buffer.loc_addresses prf r a) \/
LowStar.Monotonic.Buffer.loc_includes s2 (LowStar.Monotonic.Buffer.loc_addresses prf r a))
(ensures
LowStar.Monotonic.Buffer.loc_includes (LowStar.Monotonic.Buffer.loc_union s1 s2)
(LowStar.Monotonic.Buffer.loc_addresses prf r a))
[
SMTPat (LowStar.Monotonic.Buffer.loc_includes (LowStar.Monotonic.Buffer.loc_union s1 s2)
(LowStar.Monotonic.Buffer.loc_addresses prf r a))
] | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowStar.Monotonic.Buffer.loc",
"Prims.bool",
"FStar.Monotonic.HyperHeap.rid",
"FStar.Set.set",
"Prims.nat",
"LowStar.Monotonic.Buffer.loc_includes_union_l",
"LowStar.Monotonic.Buffer.loc_addresses",
"Prims.unit",
"Prims.l_or",
"LowStar.Monotonic.Buffer.loc_includes",
"Prims.squash",
"LowStar.Monotonic.Buffer.loc_union",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat",
"Prims.Nil"
] | [] | true | false | true | false | false | let loc_includes_union_l_addresses (s1 s2: loc) (prf: bool) (r: HS.rid) (a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a))
)
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))] =
| loc_includes_union_l s1 s2 (loc_addresses prf r a) | false |
LowStar.Monotonic.Buffer.fsti | LowStar.Monotonic.Buffer.loc_includes_addresses_addresses_1 | val loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma (requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures
(loc_includes (loc_addresses preserve_liveness1 r1 s1)
(loc_addresses preserve_liveness2 r2 s2)))
[
SMTPat
(loc_includes (loc_addresses preserve_liveness1 r1 s1)
(loc_addresses preserve_liveness2 r2 s2))
] | val loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma (requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures
(loc_includes (loc_addresses preserve_liveness1 r1 s1)
(loc_addresses preserve_liveness2 r2 s2)))
[
SMTPat
(loc_includes (loc_addresses preserve_liveness1 r1 s1)
(loc_addresses preserve_liveness2 r2 s2))
] | let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2 | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 81,
"end_line": 834,
"start_col": 0,
"start_line": 826
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
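(* Two quick instances (illustrative sketches, not part of this interface):
   - the trivial preorder `fun _ _ -> True` is compatible with itself on any
     sub-range [i, j);
   - the "pointwise growth" preorder on sequences of natural numbers,
       fun s1 s2 -> Seq.length s1 == Seq.length s2 /\
                 (forall (k:nat{k < Seq.length s1}). Seq.index s1 k <= Seq.index s2 k),
     is also compatible with itself on any sub-range, since replacing a slice
     by a pointwise-larger one yields a pointwise-larger sequence. *)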
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
 * Most of the time, they will use wrappers such as buffer, immutable buffer, etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
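(* For illustration: plain mutable buffers arise as the instance where both
   preorders are trivial; this is essentially how LowStar.Buffer defines them:
     let trivial_preorder (a:Type0) : srel a = fun _ _ -> True
     type buffer (a:Type0) = mbuffer a (trivial_preorder a) (trivial_preorder a)
*)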
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
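(* Typical use: `live h b` is a precondition of every stateful operation on
   `b`; e.g. an illustrative (hypothetical) signature of a function that only
   reads the memory:
     val probe (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
       : HST.Stack unit (requires fun h -> live h b)
                        (ensures  fun h0 _ h1 -> h0 == h1)
*)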
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
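(* For example, a ghost predicate stating that a two-element buffer holds
   equal values in heap `h` (illustrative only):
     let both_equal (#a:Type0) (#rrel #rel:srel a)
       (h:HS.mem) (b:mbuffer a rrel rel{length b == 2}) : GTot Type0
       = get h b 0 == get h b 1
   which unfolds to `Seq.index (as_seq h b) 0 == Seq.index (as_seq h b) 1`. *)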
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder (sub_rel) to the sub-buffer,
/// provided it is compatible with the parent buffer's preorder (see ``compatible_sub``)
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
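(* E.g., the ghost sub-buffer covering elements [2, 7) of a buffer with at
   least 7 elements, keeping the parent's preorder (illustrative sketch):
     let middle (#a:Type0) (#rrel #rel:srel a)
       (b:mbuffer a rrel rel{7 <= length b})
       : GTot (mbuffer a rrel rel)
       = mgsub rel b 2ul 5ul
*)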
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
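(* Concretely, for a buffer [b] with at least 12 elements:
     mgsub rel (mgsub rel b 2ul 10ul) 3ul 2ul == mgsub rel b 5ul 2ul
   i.e. elements [3, 5) of the slice [2, 12) are elements [5, 7) of [b]. *)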
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers with the same region and address have
/// elements of the same type (and the same initial preorder ``rrel``).
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
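(* A small consequence of these laws (sketch):
     let loc_union_none_idem (l:loc)
       : Lemma (loc_union l (loc_union loc_none l) == l)
       = loc_union_loc_none_l l; loc_union_idem l
*)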
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
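(* E.g., a footprint covering two buffers includes each of them (sketch):
     let union_includes_fst (#a:Type0) (#rrel #rel:srel a)
       (b1 b2:mbuffer a rrel rel)
       : Lemma (loc_includes (loc_union (loc_buffer b1) (loc_buffer b2))
                             (loc_buffer b1))
       = loc_includes_refl (loc_buffer b1);
         loc_includes_union_l (loc_buffer b1) (loc_buffer b2) (loc_buffer b1)
*)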
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then the set of memory
/// locations of ``b1`` includes that of ``b2``.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region ``r`` of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
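(* Illustrative sketch (hypothetical client code, not part of this interface):
   combining the region-level inclusion above with loc_includes_union_l, a
   buffer's footprint is covered by any union containing its region. *)
let example_buffer_in_region_union (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel) (l:loc)
  : Lemma (loc_includes (loc_union (loc_region_only true (frameOf b)) l) (loc_buffer b))
  = loc_includes_region_buffer true (Set.singleton (frameOf b)) b;
    loc_includes_union_l (loc_region_only true (frameOf b)) l (loc_buffer b)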
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then the corresponding sets of memory locations
/// are also related by ``loc_includes``.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
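(* Illustrative sketch (hypothetical client code, not part of this interface):
   the region footprint that does not preserve liveness includes the one that
   does, by direct appeal to the lemma just above. *)
let example_region_only_weaken (r: HS.rid)
  : Lemma (loc_includes (loc_region_only false r) (loc_region_only true r))
  = loc_includes_region_region' true (Set.singleton r)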
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then the corresponding sets of memory locations are also related by ``loc_includes``.
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2))) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
preserve_liveness1: Prims.bool ->
preserve_liveness2: Prims.bool ->
r1: FStar.Monotonic.HyperHeap.rid ->
r2: FStar.Monotonic.HyperHeap.rid ->
s1: FStar.Set.set Prims.nat ->
s2: FStar.Set.set Prims.nat
-> FStar.Pervasives.Lemma
(requires r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ FStar.Set.subset s2 s1)
(ensures
LowStar.Monotonic.Buffer.loc_includes (LowStar.Monotonic.Buffer.loc_addresses preserve_liveness1
r1
s1)
(LowStar.Monotonic.Buffer.loc_addresses preserve_liveness2 r2 s2))
[
SMTPat (LowStar.Monotonic.Buffer.loc_includes (LowStar.Monotonic.Buffer.loc_addresses preserve_liveness1
r1
s1)
(LowStar.Monotonic.Buffer.loc_addresses preserve_liveness2 r2 s2))
] | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"Prims.bool",
"FStar.Monotonic.HyperHeap.rid",
"FStar.Set.set",
"Prims.nat",
"LowStar.Monotonic.Buffer.loc_includes_addresses_addresses",
"Prims.unit",
"Prims.l_and",
"Prims.eq2",
"Prims.l_imp",
"Prims.b2t",
"FStar.Set.subset",
"Prims.squash",
"LowStar.Monotonic.Buffer.loc_includes",
"LowStar.Monotonic.Buffer.loc_addresses",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat",
"Prims.Nil"
] | [] | true | false | true | false | false | let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma (requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures
(loc_includes (loc_addresses preserve_liveness1 r1 s1)
(loc_addresses preserve_liveness2 r2 s2)))
[
SMTPat
(loc_includes (loc_addresses preserve_liveness1 r1 s1)
(loc_addresses preserve_liveness2 r2 s2))
] =
| loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2 | false |
LowStar.Monotonic.Buffer.fsti | LowStar.Monotonic.Buffer.loc_disjoint_includes_r | val loc_disjoint_includes_r (b1 b2 b2': loc)
: Lemma (requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')] | val loc_disjoint_includes_r (b1 b2 b2': loc)
: Lemma (requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')] | let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2' | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 36,
"end_line": 936,
"start_col": 0,
"start_line": 932
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting sequence are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
 * Most of the time, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
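(* Illustrative sketch (hypothetical client code, not part of this interface):
   the null buffer is live in any memory, by an explicit appeal to live_null. *)
let example_null_always_live (a:Type0) (rrel rel:srel a) (h:HS.mem)
  : Lemma (live h (mnull #a #rrel #rel))
  = live_null a rrel rel h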
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq h b``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` have the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further the clients can attach a preorder with the subbuffer (sub_rel),
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
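(* Illustrative sketch (hypothetical client code, not part of this interface):
   specializing gsub_gsub to a single preorder shows that nested sub-buffers
   collapse to one mgsub at the summed offset. *)
let example_gsub_collapse (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel) (i1 len1 i2 len2:U32.t)
  : Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
                     U32.v i2 + U32.v len2 <= U32.v len1))
          (ensures (mgsub rel (mgsub rel b i1 len1) i2 len2 ==
                    mgsub rel b (U32.add i1 i2) len2))
  = gsub_gsub b i1 len1 rel i2 len2 rel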
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer are the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
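(* Illustrative sketch (hypothetical client code, not part of this interface):
   reading index k of a sub-buffer is reading index i + k of the enclosing
   buffer, by as_seq_gsub and the standard slice-indexing lemma. *)
let example_as_seq_sub_index (#a:Type0) (#rrel #rel:srel a)
  (h:HS.mem) (b:mbuffer a rrel rel) (i len:U32.t) (k:nat)
  : Lemma (requires (U32.v i + U32.v len <= length b /\ k < U32.v len))
          (ensures (Seq.index (as_seq h (mgsub rel b i len)) k ==
                    Seq.index (as_seq h b) (U32.v i + k)))
  = as_seq_gsub h b i len rel;
    Seq.lemma_index_slice (as_seq h b) (U32.v i) (U32.v i + U32.v len) k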
/// Two live non-null buffers with the same region and address have
/// elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
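(* Illustrative sketch (hypothetical client code, not part of this interface):
   with the unit and idempotence laws above, a duplicated footprint collapses. *)
let example_footprint_collapse (l: loc)
  : Lemma (loc_union l (loc_union l loc_none) == l)
  = loc_union_loc_none_r l;
    loc_union_idem l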
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then the corresponding sets
/// of memory locations are also related by ``loc_includes``.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region ``r`` of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then the corresponding sets of memory locations
/// are also related by ``loc_includes``.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then the corresponding sets of memory locations are also related by ``loc_includes``.
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
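(* Illustrative sketch (hypothetical client code, not part of this interface):
   reorienting a disjointness hypothesis by an explicit call to loc_disjoint_sym. *)
let example_disjoint_flip (l1 l2: loc)
  : Lemma (requires (loc_disjoint l1 l2)) (ensures (loc_disjoint l2 l1))
  = loc_disjoint_sym l1 l2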
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2 | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
b1: LowStar.Monotonic.Buffer.loc ->
b2: LowStar.Monotonic.Buffer.loc ->
b2': LowStar.Monotonic.Buffer.loc
-> FStar.Pervasives.Lemma
(requires
LowStar.Monotonic.Buffer.loc_includes b2 b2' /\ LowStar.Monotonic.Buffer.loc_disjoint b1 b2)
(ensures LowStar.Monotonic.Buffer.loc_disjoint b1 b2')
[
SMTPat (LowStar.Monotonic.Buffer.loc_disjoint b1 b2');
SMTPat (LowStar.Monotonic.Buffer.loc_includes b2 b2')
] | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowStar.Monotonic.Buffer.loc",
"LowStar.Monotonic.Buffer.loc_disjoint_includes",
"Prims.unit",
"Prims.l_and",
"LowStar.Monotonic.Buffer.loc_includes",
"LowStar.Monotonic.Buffer.loc_disjoint",
"Prims.squash",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat",
"Prims.Nil"
] | [] | true | false | true | false | false | let loc_disjoint_includes_r (b1 b2 b2': loc)
: Lemma (requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')] =
| loc_disjoint_includes b1 b2 b1 b2' | false |
LowStar.Monotonic.Buffer.fsti | LowStar.Monotonic.Buffer.loc_disjoint_sym' | val loc_disjoint_sym' (s1 s2: loc)
: Lemma (loc_disjoint s1 s2 <==> loc_disjoint s2 s1) [SMTPat (loc_disjoint s1 s2)] | val loc_disjoint_sym' (s1 s2: loc)
: Lemma (loc_disjoint s1 s2 <==> loc_disjoint s2 s1) [SMTPat (loc_disjoint s1 s2)] | let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1 | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 50,
"end_line": 897,
"start_col": 0,
"start_line": 891
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting sequence are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
 * Most of the time, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq h b``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` have the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
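(* Example (illustrative sketch, not part of this interface): a stateful
   update of index 0 could be specified with ``get`` and ``as_seq``; the
   names ``b`` and ``v`` below are hypothetical:
     ensures (fun h0 _ h1 ->
       get h1 b 0 == v /\
       Seq.slice (as_seq h1 b) 1 (length b) == Seq.slice (as_seq h0 b) 1 (length b)) *)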
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
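(* Example (illustrative sketch, not part of this interface): for a buffer
   ``b`` with ``length b >= 2``, ``mgsub rel b 1ul 1ul`` is the ghost
   sub-buffer holding the single element at offset 1, reusing the preorder of
   ``b``; the lemmas below (``len_gsub``, ``frameOf_gsub``, ``as_addr_gsub``)
   show that it has length 1 and the same region and address as ``b``. *)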
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers with the same region and address have
/// elements of the same type (and the same initial preorder).
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
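(* Example (illustrative sketch, not part of this interface): the joint
   footprint of two buffers ``b1`` and ``b2`` is written
     loc_union (loc_buffer b1) (loc_buffer b2)
   with ``loc_buffer`` introduced just below; by the commutativity,
   associativity and idempotence lemmas above, the grouping and order of such
   unions is irrelevant. *)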
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
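(* Note (illustrative, not part of this interface): ``loc_addr_of_buffer b``
   covers the entire allocation unit of ``b`` (its address in its region) and
   does not preserve liveness, so it includes ``loc_buffer b``; it is the
   natural footprint for operations that may deallocate ``b``. *)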
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
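(* Example (illustrative sketch, not part of this interface): for a region
   ``r``, ``loc_region_only false r`` covers only locations of ``r`` itself,
   whereas ``loc_all_regions_from false r`` also covers every region that
   transitively extends ``r``, e.g. stack frames pushed on top of ``r``. *)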
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
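(* Example (illustrative sketch, not part of this interface): with the lemmas
   above, an inclusion such as
     loc_includes (loc_union (loc_buffer b1) (loc_buffer b2)) (loc_buffer b2)
   follows from ``loc_includes_refl`` and ``loc_includes_union_l``, and the
   SMT patterns usually discharge it automatically. *)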
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then so do their
/// corresponding sets of memory locations.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then so do their corresponding sets of memory
/// locations.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then so do their corresponding memory locations
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1)) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | s1: LowStar.Monotonic.Buffer.loc -> s2: LowStar.Monotonic.Buffer.loc
-> FStar.Pervasives.Lemma
(ensures
LowStar.Monotonic.Buffer.loc_disjoint s1 s2 <==> LowStar.Monotonic.Buffer.loc_disjoint s2 s1
) [SMTPat (LowStar.Monotonic.Buffer.loc_disjoint s1 s2)] | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowStar.Monotonic.Buffer.loc",
"FStar.Classical.move_requires",
"LowStar.Monotonic.Buffer.loc_disjoint",
"LowStar.Monotonic.Buffer.loc_disjoint_sym",
"Prims.unit",
"Prims.l_True",
"Prims.squash",
"Prims.l_iff",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat",
"Prims.Nil"
] | [] | false | false | true | false | false | let loc_disjoint_sym' (s1 s2: loc)
: Lemma (loc_disjoint s1 s2 <==> loc_disjoint s2 s1) [SMTPat (loc_disjoint s1 s2)] =
| Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1 | false |
Vale.Transformers.BoundedInstructionEffects.fsti | Vale.Transformers.BoundedInstructionEffects.unchanged_at | val unchanged_at (locs: locations) (s1 s2: machine_state) : GTot Type0 | val unchanged_at (locs: locations) (s1 s2: machine_state) : GTot Type0 | let rec unchanged_at (locs:locations) (s1 s2:machine_state) : GTot Type0 =
match locs with
| [] -> True
| x :: xs -> (
(eval_location x s1 == eval_location x s2) /\
(unchanged_at xs s1 s2)
) | {
"file_name": "vale/code/lib/transformers/Vale.Transformers.BoundedInstructionEffects.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 5,
"end_line": 57,
"start_col": 0,
"start_line": 51
} | module Vale.Transformers.BoundedInstructionEffects
open Vale.X64.Bytes_Code_s
open Vale.X64.Machine_s
open Vale.X64.Machine_Semantics_s
open Vale.Def.PossiblyMonad
open Vale.Transformers.Locations
module L = FStar.List.Tot
(** A [location_with_value] contains a location and the value it must hold *)
type location_with_value = l:location_eq & location_val_eqt l
(** A [locations_with_values] contains locations and values they must hold *)
type locations_with_values = list location_with_value
(** An [rw_set] contains information about what locations are read and
written by a stateful operation. *)
type rw_set = {
loc_reads: locations;
loc_writes: locations;
loc_constant_writes: locations_with_values;
}
(** [rw_set_of_ins i] returns the read/write sets for the execution of
an instruction. *)
val rw_set_of_ins : i:ins -> rw_set
(** [locations_of_ocmp o] returns the read set for a comparison operator. *)
val locations_of_ocmp : o:ocmp -> locations
(** [unchanged_except exc s1 s2] means all locations that are disjoint
from the exceptions [exc] have the same value in both [s1] and [s2]. *)
let unchanged_except (exceptions:locations) (s1 s2:machine_state) :
GTot Type0 =
(forall (a:location). {:pattern (eval_location a s2)} (
(!!(disjoint_location_from_locations a exceptions) ==>
(eval_location a s1 == eval_location a s2))
))
(** [only_affects locs f] means that running [f] leaves everything
except [locs] unchanged. *)
let only_affects (locs:locations) (f:st unit) : GTot Type0 =
forall s. {:pattern unchanged_except locs s (run f s)} (
(run f s).ms_ok ==> unchanged_except locs s (run f s)
)
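(* Example (illustrative sketch, not part of this interface): for two
   locations [l1] and [l2], [unchanged_at [l1; l2] s1 s2] unfolds to
     eval_location l1 s1 == eval_location l1 s2 /\
     (eval_location l2 s1 == eval_location l2 s2 /\ True) *)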
(** [unchanged_at locs s1 s2] means the value of any location in | {
"checked_file": "/",
"dependencies": [
"Vale.X64.Machine_Semantics_s.fst.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.Bytes_Code_s.fst.checked",
"Vale.Transformers.Locations.fsti.checked",
"Vale.Def.PossiblyMonad.fst.checked",
"prims.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked"
],
"interface_file": false,
"source_file": "Vale.Transformers.BoundedInstructionEffects.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Vale.Transformers.Locations",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.PossiblyMonad",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Print_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_Semantics_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Instructions_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Instruction_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Bytes_Code_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Vale.Transformers.Locations",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.PossiblyMonad",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_Semantics_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Bytes_Code_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Transformers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Transformers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
locs: Vale.Transformers.Locations.locations ->
s1: Vale.X64.Machine_Semantics_s.machine_state ->
s2: Vale.X64.Machine_Semantics_s.machine_state
-> Prims.GTot Type0 | Prims.GTot | [
"sometrivial"
] | [] | [
"Vale.Transformers.Locations.locations",
"Vale.X64.Machine_Semantics_s.machine_state",
"Prims.l_True",
"Vale.Transformers.Locations.location",
"Prims.list",
"Prims.l_and",
"Prims.eq2",
"Vale.Transformers.Locations.location_val_t",
"Vale.Transformers.Locations.eval_location",
"Vale.Transformers.BoundedInstructionEffects.unchanged_at"
] | [
"recursion"
] | false | false | false | false | true | let rec unchanged_at (locs: locations) (s1 s2: machine_state) : GTot Type0 =
| match locs with
| [] -> True
| x :: xs -> ((eval_location x s1 == eval_location x s2) /\ (unchanged_at xs s1 s2)) | false |
LowStar.Monotonic.Buffer.fsti | LowStar.Monotonic.Buffer.disjoint | val disjoint
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
: GTot Type0 | val disjoint
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
: GTot Type0 | let disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_disjoint (loc_buffer b1) (loc_buffer b2) | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 46,
"end_line": 1727,
"start_col": 0,
"start_line": 1725
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq h b``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` have the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers with the same region and address have
/// elements of the same type (and the same initial preorder).
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then so do their
/// corresponding sets of memory locations.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
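(* Added illustrative sketch (not part of the original interface): the
   location of a sub-buffer is always included in the location of its
   parent buffer; here the sub-buffer keeps the parent's preorder [rel]. *)
let example_sub_buffer_loc (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel) (i len:U32.t)
  : Lemma (requires (U32.v i + U32.v len <= length b))
          (ensures  (loc_includes (loc_buffer b) (loc_buffer (mgsub rel b i len))))
  = loc_includes_gsub_buffer_r' b i len rel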
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in the region of ``b``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then the set of memory locations corresponding to
/// ``s1`` includes the one corresponding to ``s2``.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``, then
/// the set of memory locations corresponding to ``s1`` includes the one
/// corresponding to ``s2``.
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
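(* Added illustrative sketch (not part of the original interface): combining
   monotonicity and symmetry of disjointness with explicit lemma calls. *)
let example_disjoint_sym_includes (l1 l2 l2': loc)
  : Lemma (requires (loc_disjoint l1 l2 /\ loc_includes l2 l2'))
          (ensures  (loc_disjoint l2' l1))
  = loc_disjoint_includes_r l1 l2 l2';
    loc_disjoint_sym l1 l2'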
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
 * Same as all_disjoint, retained for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
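(* Added illustrative sketch (not part of the original interface): a
   hypothetical operation over two buffers would typically bundle its
   liveness and separation preconditions with the helpers above, e.g. *)
let example_two_buffer_pre
  (#a:Type0) (#rrel #rel:srel a)
  (b1 b2:mbuffer a rrel rel) (h:HS.mem) : Type0
  = all_live h [buf b1; buf b2] /\
    all_disjoint [loc_buffer b1; loc_buffer b2]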
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
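(* Added illustrative sketch (not part of the original interface): the usual
   framing argument, spelled out with explicit calls; writing through [b1]
   preserves the liveness and contents of a disjoint buffer [b2]. *)
let example_framing (#a:Type0) (#rrel #rel:srel a)
  (b1 b2:mbuffer a rrel rel) (h0 h1:HS.mem)
  : Lemma (requires (live h0 b2 /\
                     loc_disjoint (loc_buffer b1) (loc_buffer b2) /\
                     modifies (loc_buffer b1) h0 h1))
          (ensures  (live h1 b2 /\ as_seq h0 b2 == as_seq h1 b2))
  = loc_disjoint_sym (loc_buffer b1) (loc_buffer b2);
    modifies_buffer_elim b2 (loc_buffer b1) h0 h1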
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
/// If the memory state does not change, then any set of memory locations
/// is trivially modified (in particular, the empty set ``loc_none``).
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
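(* Added illustrative sketch (not part of the original interface): weakening
   a modifies clause from a single buffer to a union containing it. *)
let example_modifies_weaken (#a:Type0) (#rrel #rel:srel a)
  (b1 b2:mbuffer a rrel rel) (h0 h1:HS.mem)
  : Lemma (requires (modifies (loc_buffer b1) h0 h1))
          (ensures  (modifies (loc_union (loc_buffer b1) (loc_buffer b2)) h0 h1))
  = loc_includes_refl (loc_buffer b1);
    loc_includes_union_l (loc_buffer b1) (loc_buffer b2) (loc_buffer b1);
    modifies_loc_includes (loc_union (loc_buffer b1) (loc_buffer b2)) h0 h1 (loc_buffer b1)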
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x
/// Modifies clauses are transitive. This lemma is the most general
/// one.
val modifies_trans
(s12: loc)
(h1 h2: HS.mem)
(s23: loc)
(h3: HS.mem)
: Lemma
(requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
(ensures (modifies (loc_union s12 s23) h1 h3))
let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
= modifies_trans l h1 h2 l_goal h3
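(* Added illustrative sketch (not part of the original interface): chaining
   two modifies clauses over the same footprint with the linear variant. *)
let example_modifies_chain (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel) (h0 h1 h2:HS.mem)
  : Lemma (requires (modifies (loc_buffer b) h0 h1 /\ modifies (loc_buffer b) h1 h2))
          (ensures  (modifies (loc_buffer b) h0 h2))
  = loc_includes_refl (loc_buffer b);
    modifies_trans_linear (loc_buffer b) (loc_buffer b) h0 h1 h2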
/// Regions that are not live can be removed from sets of memory
/// locations that are modified.
val modifies_only_live_regions
(rs: Set.set HS.rid)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_regions false rs) l) h h' /\
(forall r . Set.mem r rs ==> (~ (HS.live_region h r)))
))
(ensures (modifies l h h'))
/// As a consequence, fresh regions can be removed from modifies
/// clauses.
val no_upd_fresh_region: r:HS.rid -> l:loc -> h0:HS.mem -> h1:HS.mem -> Lemma
(requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1))
(ensures (modifies l h0 h1))
[SMTPat (HS.fresh_region r h0 h1); SMTPat (modifies l h0 h1)]
val new_region_modifies (m0: HS.mem) (r0: HS.rid) (col: option int) : Lemma
(requires (HST.is_eternal_region r0 /\ HS.live_region m0 r0 /\ (None? col \/ HS.is_heap_color (Some?.v col))))
(ensures (
let (_, m1) = HS.new_eternal_region m0 r0 col in
modifies loc_none m0 m1
))
[SMTPat (HS.new_eternal_region m0 r0 col)]
/// Stack discipline: any stack frame (and all its transitively
/// extending regions) that is pushed, modified and popped can be
/// removed from a modifies clause.
/// AR: 01/29/2019: Removing the smt pattern from this lemma.
/// Clients are no longer expected to call it explicitly;
/// if you find yourself having to, please raise an issue.
val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
))
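(* Added illustrative sketch (not part of the original interface): since the
   SMT pattern was removed, a client needing this fact invokes the lemma
   explicitly, as in the wrapper below (same statement, restated verbatim). *)
let example_fresh_frame_popped (h0 h1:HS.mem) (s:loc) (h2 h3:HS.mem)
  : Lemma (requires (HS.fresh_frame h0 h1 /\
                     modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
                     HS.get_tip h2 == HS.get_tip h1 /\
                     HS.popped h2 h3))
          (ensures  (modifies s h0 h3 /\ HS.get_tip h3 == HS.get_tip h0))
  = modifies_fresh_frame_popped h0 h1 s h2 h3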
/// Compatibility lemmas to rescue modifies clauses specified in the
/// standard F* HyperStack library.
val modifies_loc_regions_intro
(rs: Set.set HS.rid)
(h1 h2: HS.mem)
: Lemma
(requires (HS.modifies rs h1 h2))
(ensures (modifies (loc_regions true rs) h1 h2))
val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2))
/// Modifies clauses for allocating a reference: nothing is
/// modified. (In particular, a modifies clause does not track
/// memory locations that are created.)
val modifies_ralloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(i: HS.rid)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel)
(h' : HS.mem)
: Lemma
(requires (HST.ralloc_post i init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.ralloc_post i init h x h')]
val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')]
/// Modifies clause for freeing a reference: the address is modified.
val modifies_free
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel { HS.is_mm r } )
(m: HS.mem { m `HS.contains` r } )
: Lemma
(modifies (loc_freed_mreference r) m (HS.free r m))
[SMTPat (HS.free r m)]
/// Another compatibility lemma
val modifies_none_modifies
(h1 h2: HS.mem)
: Lemma
(requires (HST.modifies_none h1 h2))
(ensures (modifies loc_none h1 h2))
[SMTPat (HST.modifies_none h1 h2)]
/// Compatibility with HS.upd
val modifies_upd
(#t: Type) (#pre: Preorder.preorder t)
(r: HS.mreference t pre)
(v: t)
(h: HS.mem)
: Lemma
(requires (HS.contains h r))
(ensures (modifies (loc_mreference r) h (HS.upd h r v)))
[SMTPat (HS.upd h r v)]
/// Introduction lemma for modifying loc_buffer_from_to
val modifies_loc_buffer_from_to_intro
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
/// A memory ``h`` does not contain address ``a`` in region ``r``, denoted
/// ``does_not_contain_addr h (r, a)``, only if either region ``r`` is
/// not live, or address ``a`` is unused in region ``r``.
(* BEGIN TODO: move to FStar.Monotonic.HyperStack *)
val does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: GTot Type0
val not_live_region_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (~ (HS.live_region h (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val unused_in_does_not_contain_addr
(h: HS.mem)
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r)))
val addr_unused_in_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val free_does_not_contain_addr
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
HS.is_mm r /\
m `HS.contains` r /\
fst x == HS.frameOf r /\
snd x == HS.as_addr r
))
(ensures (
HS.free r m `does_not_contain_addr` x
))
[SMTPat (HS.free r m `does_not_contain_addr` x)]
val does_not_contain_addr_elim
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
m `does_not_contain_addr` x /\
HS.frameOf r == fst x /\
HS.as_addr r == snd x
))
(ensures (~ (m `HS.contains` r)))
(** END TODO *)
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_live_addresses
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_addresses false r a) l) h h' /\
(forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x))
))
(ensures (modifies l h h'))
(* Generic way to ensure that a buffer just allocated is disjoint from
any other object, however the latter's liveness is defined. *)
val loc_not_unused_in (h: HS.mem) : GTot loc
val loc_unused_in (h: HS.mem) : GTot loc
(* Shortcut notations with more handy names *)
let loc_in (l: loc) (h: HS.mem) =
loc_not_unused_in h `loc_includes` l
let loc_not_in (l: loc) (h: HS.mem) =
loc_unused_in h `loc_includes` l
val loc_regions_unused_in (h: HS.mem) (rs: Set.set HS.rid) : Lemma
(requires (forall r . Set.mem r rs ==> (~ (HS.live_region h r))))
(ensures (loc_unused_in h `loc_includes` loc_regions false rs))
val loc_unused_in_not_unused_in_disjoint (h: HS.mem) : Lemma
(loc_disjoint (loc_unused_in h) (loc_not_unused_in h))
val not_live_region_loc_not_unused_in_disjoint
(h0: HS.mem)
(r: HS.rid)
: Lemma
(requires (~ (HS.live_region h0 r)))
(ensures (loc_disjoint (loc_region_only false r) (loc_not_unused_in h0)))
let fresh_frame_loc_not_unused_in_disjoint
(h0 h1: HS.mem)
: Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)]
= not_live_region_loc_not_unused_in_disjoint h0 (HS.get_tip h1)
val live_loc_not_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b))
(ensures (loc_not_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (live h b)]
val unused_in_loc_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (unused_in b h))
(ensures (loc_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (unused_in b h)]
val modifies_address_liveness_insensitive_unused_in
(h h' : HS.mem)
: Lemma
(requires (modifies (address_liveness_insensitive_locs) h h'))
(ensures (loc_not_unused_in h' `loc_includes` loc_not_unused_in h /\ loc_unused_in h `loc_includes` loc_unused_in h'))
/// More generally, memory locations that are unused in the initial
/// memory can be removed from modifies clauses.
val modifies_only_not_unused_in
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (modifies (loc_union (loc_unused_in h) l) h h'))
(ensures (modifies l h h'))
val mreference_live_loc_not_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (h `HS.contains` r))
(ensures (loc_not_unused_in h `loc_includes` loc_freed_mreference r /\ loc_not_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.contains h r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_freed_mreference r)];
]]
val mreference_unused_in_loc_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (loc_unused_in h `loc_includes` loc_freed_mreference r /\ loc_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.unused_in r h)];
[SMTPat (loc_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_unused_in h `loc_includes` loc_freed_mreference r)];
]]
let unused_in_not_unused_in_disjoint_2
(l1 l2 l1' l2': loc)
(h: HS.mem)
: Lemma
(requires (loc_unused_in h `loc_includes` l1 /\ loc_not_unused_in h `loc_includes` l2 /\ l1 `loc_includes` l1' /\ l2 `loc_includes` l2' ))
(ensures (loc_disjoint l1' l2' ))
[SMTPat (loc_disjoint l1' l2'); SMTPat (loc_unused_in h `loc_includes` l1); SMTPat (loc_not_unused_in h `loc_includes` l2)]
= loc_includes_trans (loc_unused_in h) l1 l1' ;
loc_includes_trans (loc_not_unused_in h) l2 l2' ;
loc_unused_in_not_unused_in_disjoint h ;
loc_disjoint_includes (loc_unused_in h) (loc_not_unused_in h) l1' l2'
val modifies_loc_unused_in
(l: loc)
(h1 h2: HS.mem)
(l' : loc)
: Lemma
(requires (
modifies l h1 h2 /\
address_liveness_insensitive_locs `loc_includes` l /\
loc_unused_in h2 `loc_includes` l'
))
(ensures (loc_unused_in h1 `loc_includes` l'))
[SMTPatOr [
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h2 `loc_includes` l')];
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h1 `loc_includes` l')];
]]
/// Shorthand: freshness
let fresh_loc (l: loc) (h h' : HS.mem) : GTot Type0 =
loc_unused_in h `loc_includes` l /\
loc_not_unused_in h' `loc_includes` l
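(* Added illustrative sketch (not part of the original interface): a location
   that is fresh between [h] and [h'] is disjoint from anything already
   allocated in [h]. *)
let example_fresh_loc_disjoint (l l':loc) (h h':HS.mem)
  : Lemma (requires (fresh_loc l h h' /\ l' `loc_in` h))
          (ensures  (loc_disjoint l l'))
  = loc_includes_refl l;
    loc_includes_refl l';
    unused_in_not_unused_in_disjoint_2 l l' l l' h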
let ralloc_post_fresh_loc (#a:Type) (#rel:Preorder.preorder a) (i: HS.rid) (init:a) (m0: HS.mem)
(x: HST.mreference a rel{HST.is_eternal_region (HS.frameOf x)}) (m1: HS.mem) : Lemma
(requires (HST.ralloc_post i init m0 x m1))
(ensures (fresh_loc (loc_freed_mreference x) m0 m1))
[SMTPat (HST.ralloc_post i init m0 x m1)]
= ()
//AR: this is needed for liveness across fresh_frame
val fresh_frame_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (modifies loc_none h0 h1))
[SMTPat (HS.fresh_frame h0 h1)]
val popped_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.popped h0 h1))
(ensures (modifies (loc_region_only false (HS.get_tip h0)) h0 h1))
[SMTPat (HS.popped h0 h1)]
val modifies_remove_new_locs (l_fresh l_aux l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (fresh_loc l_fresh h1 h2 /\
modifies l_aux h1 h2 /\
l_goal `loc_includes` l_aux /\
modifies (loc_union l_fresh l_goal) h2 h3))
(ensures (modifies l_goal h1 h3))
[SMTPat (fresh_loc l_fresh h1 h2);
SMTPat (modifies l_aux h1 h2);
SMTPat (modifies l_goal h1 h3)]
(*
 * AR: this lemma frames the modifies clause across a fresh frame.
 * One way to do it would have been to reuse the lemma modifies_remove_new_locs,
 *   treating the fresh frame as another new location.
 * However, the way the library is set up, loc_region in any form cannot be
 *   considered a fresh loc, so we have a special lemma for fresh_frame.
*)
let modifies_remove_fresh_frame (h1 h2 h3:HS.mem) (l:loc)
: Lemma (requires (HS.fresh_frame h1 h2 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h2)) l) h2 h3))
(ensures (modifies l h1 h3))
[SMTPat (modifies l h1 h3); SMTPat (HS.fresh_frame h1 h2)]
= loc_regions_unused_in h1 (HS.mod_set (Set.singleton (HS.get_tip h2)));
modifies_only_not_unused_in l h1 h3
/// Legacy shorthands for disjointness and inclusion of buffers
/// | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
b1: LowStar.Monotonic.Buffer.mbuffer a1 rrel1 rel1 ->
b2: LowStar.Monotonic.Buffer.mbuffer a2 rrel2 rel2
-> Prims.GTot Type0 | Prims.GTot | [
"sometrivial"
] | [] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Monotonic.Buffer.loc_disjoint",
"LowStar.Monotonic.Buffer.loc_buffer"
] | [] | false | false | false | false | true | let disjoint
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
: GTot Type0 =
| loc_disjoint (loc_buffer b1) (loc_buffer b2) | false |
Hacl.Spec.PrecompBaseTable256.fst | Hacl.Spec.PrecompBaseTable256.lemma_decompose_nat256_as_four_u64_lbignum | val lemma_decompose_nat256_as_four_u64_lbignum: x:BD.lbignum U64 4{BD.bn_v x < pow2 256} ->
Lemma (let (x0, x1, x2, x3) = decompose_nat256_as_four_u64 (BD.bn_v x) in
BD.bn_v (LSeq.sub x 0 1) == x0 /\
BD.bn_v (LSeq.sub x 1 1) == x1 /\
BD.bn_v (LSeq.sub x 2 1) == x2 /\
BD.bn_v (LSeq.sub x 3 1) == x3) | val lemma_decompose_nat256_as_four_u64_lbignum: x:BD.lbignum U64 4{BD.bn_v x < pow2 256} ->
Lemma (let (x0, x1, x2, x3) = decompose_nat256_as_four_u64 (BD.bn_v x) in
BD.bn_v (LSeq.sub x 0 1) == x0 /\
BD.bn_v (LSeq.sub x 1 1) == x1 /\
BD.bn_v (LSeq.sub x 2 1) == x2 /\
BD.bn_v (LSeq.sub x 3 1) == x3) | let lemma_decompose_nat256_as_four_u64_lbignum x =
let open Lib.Sequence in
let bn_x0 = LSeq.sub x 0 1 in
let bn_x1 = LSeq.sub x 1 1 in
let bn_x2 = LSeq.sub x 2 1 in
let bn_x3 = LSeq.sub x 3 1 in
assert_norm (pow2 0 = 1);
BD.bn_eval1 bn_x0;
BD.bn_eval_index x 0;
BD.bn_eval1 bn_x1;
BD.bn_eval_index x 1;
BD.bn_eval1 bn_x2;
BD.bn_eval_index x 2;
BD.bn_eval1 bn_x3;
BD.bn_eval_index x 3 | {
"file_name": "code/bignum/Hacl.Spec.PrecompBaseTable256.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 22,
"end_line": 163,
"start_col": 0,
"start_line": 146
} | module Hacl.Spec.PrecompBaseTable256
open FStar.Mul
open Lib.IntTypes
module LSeq = Lib.Sequence
module Loops = Lib.LoopCombinators
module LE = Lib.Exponentiation
module SE = Spec.Exponentiation
module BD = Hacl.Spec.Bignum.Definitions
#set-options "--z3rlimit 50 --fuel 0 --ifuel 0"
let lemma_mod_pow2_sub x a b =
calc (==) {
x / pow2 a % pow2 b * pow2 a;
(==) { Math.Lemmas.pow2_modulo_division_lemma_1 x a (a + b) }
x % pow2 (a + b) / pow2 a * pow2 a;
(==) { Math.Lemmas.euclidean_division_definition (x % pow2 (a + b)) (pow2 a) }
x % pow2 (a + b) - x % pow2 (a + b) % pow2 a;
(==) { Math.Lemmas.pow2_modulo_modulo_lemma_1 x a (a + b) }
x % pow2 (a + b) - x % pow2 a;
}
let lemma_decompose_nat256_as_four_u64 x =
let x0 = x % pow2 64 in
let x1 = x / pow2 64 % pow2 64 in
let x2 = x / pow2 128 % pow2 64 in
let x3' = x / pow2 192 % pow2 64 in
Math.Lemmas.lemma_div_lt x 256 192;
Math.Lemmas.small_mod (x / pow2 192) (pow2 64);
let x3 = x / pow2 192 in
assert (x3 == x3');
calc (==) {
x0 + x1 * pow2 64 + x2 * pow2 128 + x3 * pow2 192;
(==) { }
x0 + x1 * pow2 64 + (x / pow2 128 % pow2 64) * pow2 128 + x / pow2 192 * pow2 192;
(==) { lemma_mod_pow2_sub x 128 64 }
x0 + x1 * pow2 64 + x % pow2 192 - x % pow2 128 + x / pow2 192 * pow2 192;
(==) { Math.Lemmas.euclidean_division_definition x (pow2 192) }
x0 + x1 * pow2 64 - x % pow2 128 + x;
(==) { lemma_mod_pow2_sub x 64 64 }
x;
}
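(* Added illustrative note (not part of the original file): the identity
   proved above is just the base-2^64 positional expansion of x. For
   instance, a hypothetical x = 3 + 5 * pow2 64 + 9 * pow2 192 (< pow2 256)
   decomposes as x0 = 3, x1 = 5, x2 = 0, x3 = 9, and indeed
   x0 + x1 * pow2 64 + x2 * pow2 128 + x3 * pow2 192 = x. *)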
let lemma_point_mul_base_precomp4 #t k a b =
let (b0, b1, b2, b3) = decompose_nat256_as_four_u64 b in
let a_pow2_64 = LE.pow k a (pow2 64) in
let a_pow2_128 = LE.pow k a (pow2 128) in
let a_pow2_192 = LE.pow k a (pow2 192) in
let res = LE.exp_four_fw k a 64 b0 a_pow2_64 b1 a_pow2_128 b2 a_pow2_192 b3 4 in
calc (==) {
LE.exp_four_fw k a 64 b0 a_pow2_64 b1 a_pow2_128 b2 a_pow2_192 b3 4;
(==) { LE.exp_four_fw_lemma k a 64 b0 a_pow2_64 b1 a_pow2_128 b2 a_pow2_192 b3 4 }
k.LE.mul
(k.LE.mul
(k.LE.mul (LE.pow k a b0) (LE.pow k (LE.pow k a (pow2 64)) b1))
(LE.pow k a_pow2_128 b2))
(LE.pow k a_pow2_192 b3);
(==) { LE.lemma_pow_mul k a (pow2 64) b1 }
k.LE.mul
(k.LE.mul
(k.LE.mul (LE.pow k a b0) (LE.pow k a (b1 * pow2 64)))
(LE.pow k a_pow2_128 b2))
(LE.pow k a_pow2_192 b3);
(==) { LE.lemma_pow_add k a b0 (b1 * pow2 64) }
k.LE.mul
(k.LE.mul
(LE.pow k a (b0 + b1 * pow2 64))
(LE.pow k (LE.pow k a (pow2 128)) b2))
(LE.pow k a_pow2_192 b3);
(==) { LE.lemma_pow_mul k a (pow2 128) b2 }
k.LE.mul
(k.LE.mul (LE.pow k a (b0 + b1 * pow2 64)) (LE.pow k a (b2 * pow2 128)))
(LE.pow k a_pow2_192 b3);
(==) { LE.lemma_pow_add k a (b0 + b1 * pow2 64) (b2 * pow2 128) }
k.LE.mul
(LE.pow k a (b0 + b1 * pow2 64 + b2 * pow2 128))
(LE.pow k (LE.pow k a (pow2 192)) b3);
(==) { LE.lemma_pow_mul k a (pow2 192) b3 }
k.LE.mul
(LE.pow k a (b0 + b1 * pow2 64 + b2 * pow2 128))
(LE.pow k a (b3 * pow2 192));
(==) { LE.lemma_pow_add k a (b0 + b1 * pow2 64 + b2 * pow2 128) (b3 * pow2 192) }
LE.pow k a (b0 + b1 * pow2 64 + b2 * pow2 128 + b3 * pow2 192);
(==) { lemma_decompose_nat256_as_four_u64 b }
LE.pow k a b;
}
//-----------------------
#push-options "--fuel 2"
let rec exp_pow2_rec_is_exp_pow2 #t k a b =
if b = 0 then Lib.LoopCombinators.eq_repeat0 k.sqr a
else begin
Lib.LoopCombinators.unfold_repeat b k.sqr a (b - 1);
assert (Loops.repeat b k.sqr a == k.sqr (Loops.repeat (b - 1) k.sqr a));
exp_pow2_rec_is_exp_pow2 k a (b - 1) end
#pop-options
let a_pow2_64_lemma #t k a =
SE.exp_pow2_lemma k a 64;
LE.exp_pow2_lemma k.SE.to.SE.comm_monoid (k.SE.to.SE.refl a) 64
let a_pow2_128_lemma #t k a =
let cm = k.SE.to.SE.comm_monoid in
let refl = k.SE.to.SE.refl in
calc (==) {
refl (a_pow2_128 k a);
(==) { }
refl (SE.exp_pow2 k (a_pow2_64 k a) 64);
(==) { a_pow2_64_lemma k (a_pow2_64 k a) }
LE.pow cm (refl (a_pow2_64 k a)) (pow2 64);
(==) { a_pow2_64_lemma k a }
LE.pow cm (LE.pow cm (refl a) (pow2 64)) (pow2 64);
(==) { LE.lemma_pow_mul cm (refl a) (pow2 64) (pow2 64) }
LE.pow cm (refl a) (pow2 64 * pow2 64);
(==) { Math.Lemmas.pow2_plus 64 64 }
LE.pow cm (refl a) (pow2 128);
}
let a_pow2_192_lemma #t k a =
let cm = k.SE.to.SE.comm_monoid in
let refl = k.SE.to.SE.refl in
calc (==) {
refl (a_pow2_192 k a);
(==) { }
refl (SE.exp_pow2 k (a_pow2_128 k a) 64);
(==) { a_pow2_64_lemma k (a_pow2_128 k a) }
LE.pow cm (refl (a_pow2_128 k a)) (pow2 64);
(==) { a_pow2_128_lemma k a }
LE.pow cm (LE.pow cm (refl a) (pow2 128)) (pow2 64);
(==) { LE.lemma_pow_mul cm (refl a) (pow2 128) (pow2 64) }
LE.pow cm (refl a) (pow2 128 * pow2 64);
(==) { Math.Lemmas.pow2_plus 128 64 }
LE.pow cm (refl a) (pow2 192);
} | {
"checked_file": "/",
"dependencies": [
"Spec.Exponentiation.fsti.checked",
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.LoopCombinators.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.Exponentiation.fsti.checked",
"Hacl.Spec.Bignum.Definitions.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Calc.fsti.checked"
],
"interface_file": true,
"source_file": "Hacl.Spec.PrecompBaseTable256.fst"
} | [
{
"abbrev": true,
"full_module": "Lib.LoopCombinators",
"short_module": "Loops"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Bignum.Definitions",
"short_module": "BD"
},
{
"abbrev": true,
"full_module": "Spec.Exponentiation",
"short_module": "SE"
},
{
"abbrev": true,
"full_module": "Lib.Exponentiation",
"short_module": "LE"
},
{
"abbrev": true,
"full_module": "Lib.Sequence",
"short_module": "LSeq"
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
x:
Hacl.Spec.Bignum.Definitions.lbignum Lib.IntTypes.U64 4
{Hacl.Spec.Bignum.Definitions.bn_v x < Prims.pow2 256}
-> FStar.Pervasives.Lemma
(ensures
(let _ =
Hacl.Spec.PrecompBaseTable256.decompose_nat256_as_four_u64 (Hacl.Spec.Bignum.Definitions.bn_v
x)
in
(let FStar.Pervasives.Native.Mktuple4 #_ #_ #_ #_ x0 x1 x2 x3 = _ in
Hacl.Spec.Bignum.Definitions.bn_v (Lib.Sequence.sub x 0 1) == x0 /\
Hacl.Spec.Bignum.Definitions.bn_v (Lib.Sequence.sub x 1 1) == x1 /\
Hacl.Spec.Bignum.Definitions.bn_v (Lib.Sequence.sub x 2 1) == x2 /\
Hacl.Spec.Bignum.Definitions.bn_v (Lib.Sequence.sub x 3 1) == x3)
<:
Type0)) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"Hacl.Spec.Bignum.Definitions.lbignum",
"Lib.IntTypes.U64",
"Prims.b2t",
"Prims.op_LessThan",
"Hacl.Spec.Bignum.Definitions.bn_v",
"Prims.pow2",
"Hacl.Spec.Bignum.Definitions.bn_eval_index",
"Prims.unit",
"Hacl.Spec.Bignum.Definitions.bn_eval1",
"FStar.Pervasives.assert_norm",
"Prims.op_Equality",
"Prims.int",
"Lib.Sequence.lseq",
"Hacl.Spec.Bignum.Definitions.limb",
"Prims.l_and",
"Prims.eq2",
"FStar.Seq.Base.seq",
"Lib.Sequence.to_seq",
"FStar.Seq.Base.slice",
"Prims.op_Addition",
"Prims.l_Forall",
"Prims.nat",
"Prims.l_or",
"FStar.Seq.Base.index",
"Lib.Sequence.index",
"Lib.Sequence.sub"
] | [] | true | false | true | false | false | let lemma_decompose_nat256_as_four_u64_lbignum x =
| let open Lib.Sequence in
let bn_x0 = LSeq.sub x 0 1 in
let bn_x1 = LSeq.sub x 1 1 in
let bn_x2 = LSeq.sub x 2 1 in
let bn_x3 = LSeq.sub x 3 1 in
assert_norm (pow2 0 = 1);
BD.bn_eval1 bn_x0;
BD.bn_eval_index x 0;
BD.bn_eval1 bn_x1;
BD.bn_eval_index x 1;
BD.bn_eval1 bn_x2;
BD.bn_eval_index x 2;
BD.bn_eval1 bn_x3;
BD.bn_eval_index x 3 | false |
LowStar.Monotonic.Buffer.fsti | LowStar.Monotonic.Buffer.includes | val includes
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
: GTot Type0 | val includes
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
: GTot Type0 | let includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_includes (loc_buffer b1) (loc_buffer b2) /\
(g_is_null b1 <==> g_is_null b2) | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 34,
"end_line": 1732,
"start_col": 0,
"start_line": 1729
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` have the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
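(* Illustrative sketch (not part of the verified interface): specifications
   typically constrain buffer contents through `as_seq` or `get`; for example
     live h b /\ length b == 1 /\ get h b 0 == x
   states that the single cell of `b` holds `x` in memory `h`. *)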
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
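(* Illustrative sketch (not part of the verified interface): in ghost code,
   `mgsub rel b 2ul 4ul` denotes the sub-buffer of `b` covering indices [2, 6)
   (provided `b` has at least 6 elements), carrying the preorder `rel`; the
   lemmas below relate its liveness, length, address and contents to those of
   `b`. *)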
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers with the same region and address have
/// elements of the same type and the same initial preorder.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
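(* Illustrative sketch (not part of the verified interface): footprints are
   assembled with `loc_union`; for example, a function writing to two buffers
   `b1` and `b2` (see `loc_buffer` and `modifies` below) would advertise
     modifies (loc_union (loc_buffer b1) (loc_buffer b2)) h0 h1
   and the monoid laws above let the SMT solver normalize such unions. *)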
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
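(* Illustrative sketch (not part of the verified interface):
   `loc_all_regions_from false (HS.get_tip h)` is the footprint commonly used
   for "everything allocated in the current stack frame", e.g. in the
   specification of code running between `HST.push_frame` and `HST.pop_frame`:
   it covers the tip region of `h` and every region nested under it. *)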
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then so do their
/// corresponding sets of memory locations.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b`` lying in a region ``r``, if its address is in a set ``s``
/// of addresses, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then so do their corresponding sets of memory
/// locations.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then so do their corresponding sets of memory locations.
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
 * Same as all_disjoint, retained for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
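(* Illustrative sketch (not part of the verified interface): these list-based
   utilities are meant for preconditions ranging over several objects; for
   example
     loc_pairwise_disjoint [loc_buffer b1; loc_buffer b2; loc_buffer b3]
   reduces at typechecking time to the three pairwise `loc_disjoint` facts. *)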
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if,
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
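(* Illustrative sketch (hypothetical signature, not part of this interface):
   a stateful function writing into a buffer `b` is typically specified as
     val fill (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (v:a)
       : HST.Stack unit
         (requires (fun h -> live h b))
         (ensures (fun h0 _ h1 -> modifies (loc_buffer b) h0 h1 /\ live h1 b))
   so that every location disjoint from `loc_buffer b` is preserved. *)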
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
/// If the memory state does not change, then any set of memory locations is
/// trivially modified (in particular, the empty set ``loc_none``.)
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
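(* Illustrative sketch (not part of the verified interface): weakening is
   routine when combining calls with different footprints; for example, from
     modifies (loc_buffer b1) h0 h1
   one may conclude
     modifies (loc_union (loc_buffer b1) (loc_buffer b2)) h0 h1
   since the larger set includes the smaller one. *)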
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
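(* Illustrative sketch (not part of the verified interface): writing to a
   buffer cannot deallocate anything, so from
     modifies (loc_buffer b) h0 h1 /\ live h0 b'
   one obtains `live h1 b'` even when `b'` overlaps `b`, by instantiating the
   weak lemma above with `l = loc_buffer b`, which is included in
   `address_liveness_insensitive_locs`. *)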
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x
/// Modifies clauses are transitive. This lemma is the most general
/// one.
val modifies_trans
(s12: loc)
(h1 h2: HS.mem)
(s23: loc)
(h3: HS.mem)
: Lemma
(requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
(ensures (modifies (loc_union s12 s23) h1 h3))
let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
= modifies_trans l h1 h2 l_goal h3
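(* Illustrative sketch (not part of the verified interface): transitivity is
   how frames are chained across successive operations; for example, from
     modifies (loc_buffer b) h0 h1 /\ modifies (loc_buffer b) h1 h2
   `modifies_trans` together with `loc_union_idem` yields
     modifies (loc_buffer b) h0 h2. *)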
/// Regions that are not live can be removed from sets of memory
/// locations that are modified.
val modifies_only_live_regions
(rs: Set.set HS.rid)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_regions false rs) l) h h' /\
(forall r . Set.mem r rs ==> (~ (HS.live_region h r)))
))
(ensures (modifies l h h'))
/// As a consequence, fresh regions can be removed from modifies
/// clauses.
val no_upd_fresh_region: r:HS.rid -> l:loc -> h0:HS.mem -> h1:HS.mem -> Lemma
(requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1))
(ensures (modifies l h0 h1))
[SMTPat (HS.fresh_region r h0 h1); SMTPat (modifies l h0 h1)]
val new_region_modifies (m0: HS.mem) (r0: HS.rid) (col: option int) : Lemma
(requires (HST.is_eternal_region r0 /\ HS.live_region m0 r0 /\ (None? col \/ HS.is_heap_color (Some?.v col))))
(ensures (
let (_, m1) = HS.new_eternal_region m0 r0 col in
modifies loc_none m0 m1
))
[SMTPat (HS.new_eternal_region m0 r0 col)]
/// Stack discipline: any stack frame (and all its transitively
/// extending regions) that is pushed, modified and popped can be
/// removed from a modifies clause.
/// AR: 01/29/2019: Removing the smt pattern from this lemma.
/// Clients are no longer expected to call it explicitly;
/// if you find yourself having to, please raise an issue.
val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
))
/// Compatibility lemmas to rescue modifies clauses specified in the
/// standard F* HyperStack library.
val modifies_loc_regions_intro
(rs: Set.set HS.rid)
(h1 h2: HS.mem)
: Lemma
(requires (HS.modifies rs h1 h2))
(ensures (modifies (loc_regions true rs) h1 h2))
val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2))
/// Modifies clauses for allocating a reference: nothing is
/// modified. (In particular, a modifies clause does not track
/// memory locations that are created.)
val modifies_ralloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(i: HS.rid)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel)
(h' : HS.mem)
: Lemma
(requires (HST.ralloc_post i init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.ralloc_post i init h x h')]
val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')]
/// Modifies clause for freeing a reference: the address is modified.
val modifies_free
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel { HS.is_mm r } )
(m: HS.mem { m `HS.contains` r } )
: Lemma
(modifies (loc_freed_mreference r) m (HS.free r m))
[SMTPat (HS.free r m)]
/// Another compatibility lemma
val modifies_none_modifies
(h1 h2: HS.mem)
: Lemma
(requires (HST.modifies_none h1 h2))
(ensures (modifies loc_none h1 h2))
[SMTPat (HST.modifies_none h1 h2)]
/// Compatibility with HS.upd
val modifies_upd
(#t: Type) (#pre: Preorder.preorder t)
(r: HS.mreference t pre)
(v: t)
(h: HS.mem)
: Lemma
(requires (HS.contains h r))
(ensures (modifies (loc_mreference r) h (HS.upd h r v)))
[SMTPat (HS.upd h r v)]
/// Introduction lemma for modifying loc_buffer_from_to
val modifies_loc_buffer_from_to_intro
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
/// A memory ``h`` does not contain address ``a`` in region ``r``, denoted
/// ``does_not_contain_addr h (r, a)``, only if either region ``r`` is
/// not live, or address ``a`` is unused in region ``r``.
(* BEGIN TODO: move to FStar.Monotonic.HyperStack *)
val does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: GTot Type0
val not_live_region_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (~ (HS.live_region h (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val unused_in_does_not_contain_addr
(h: HS.mem)
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r)))
val addr_unused_in_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val free_does_not_contain_addr
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
HS.is_mm r /\
m `HS.contains` r /\
fst x == HS.frameOf r /\
snd x == HS.as_addr r
))
(ensures (
HS.free r m `does_not_contain_addr` x
))
[SMTPat (HS.free r m `does_not_contain_addr` x)]
val does_not_contain_addr_elim
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
m `does_not_contain_addr` x /\
HS.frameOf r == fst x /\
HS.as_addr r == snd x
))
(ensures (~ (m `HS.contains` r)))
(** END TODO *)
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_live_addresses
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_addresses false r a) l) h h' /\
(forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x))
))
(ensures (modifies l h h'))
(* Generic way to ensure that a buffer just allocated is disjoint from
any other object, however the latter's liveness is defined. *)
val loc_not_unused_in (h: HS.mem) : GTot loc
val loc_unused_in (h: HS.mem) : GTot loc
(* Shortcut notations with more handy names *)
let loc_in (l: loc) (h: HS.mem) =
loc_not_unused_in h `loc_includes` l
let loc_not_in (l: loc) (h: HS.mem) =
loc_unused_in h `loc_includes` l
val loc_regions_unused_in (h: HS.mem) (rs: Set.set HS.rid) : Lemma
(requires (forall r . Set.mem r rs ==> (~ (HS.live_region h r))))
(ensures (loc_unused_in h `loc_includes` loc_regions false rs))
val loc_unused_in_not_unused_in_disjoint (h: HS.mem) : Lemma
(loc_disjoint (loc_unused_in h) (loc_not_unused_in h))
val not_live_region_loc_not_unused_in_disjoint
(h0: HS.mem)
(r: HS.rid)
: Lemma
(requires (~ (HS.live_region h0 r)))
(ensures (loc_disjoint (loc_region_only false r) (loc_not_unused_in h0)))
let fresh_frame_loc_not_unused_in_disjoint
(h0 h1: HS.mem)
: Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)]
= not_live_region_loc_not_unused_in_disjoint h0 (HS.get_tip h1)
val live_loc_not_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b))
(ensures (loc_not_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (live h b)]
val unused_in_loc_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (unused_in b h))
(ensures (loc_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (unused_in b h)]
val modifies_address_liveness_insensitive_unused_in
(h h' : HS.mem)
: Lemma
(requires (modifies (address_liveness_insensitive_locs) h h'))
(ensures (loc_not_unused_in h' `loc_includes` loc_not_unused_in h /\ loc_unused_in h `loc_includes` loc_unused_in h'))
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_not_unused_in
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (modifies (loc_union (loc_unused_in h) l) h h'))
(ensures (modifies l h h'))
val mreference_live_loc_not_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (h `HS.contains` r))
(ensures (loc_not_unused_in h `loc_includes` loc_freed_mreference r /\ loc_not_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.contains h r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_freed_mreference r)];
]]
val mreference_unused_in_loc_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (loc_unused_in h `loc_includes` loc_freed_mreference r /\ loc_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.unused_in r h)];
[SMTPat (loc_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_unused_in h `loc_includes` loc_freed_mreference r)];
]]
let unused_in_not_unused_in_disjoint_2
(l1 l2 l1' l2': loc)
(h: HS.mem)
: Lemma
(requires (loc_unused_in h `loc_includes` l1 /\ loc_not_unused_in h `loc_includes` l2 /\ l1 `loc_includes` l1' /\ l2 `loc_includes` l2' ))
(ensures (loc_disjoint l1' l2' ))
[SMTPat (loc_disjoint l1' l2'); SMTPat (loc_unused_in h `loc_includes` l1); SMTPat (loc_not_unused_in h `loc_includes` l2)]
= loc_includes_trans (loc_unused_in h) l1 l1' ;
loc_includes_trans (loc_not_unused_in h) l2 l2' ;
loc_unused_in_not_unused_in_disjoint h ;
loc_disjoint_includes (loc_unused_in h) (loc_not_unused_in h) l1' l2'
val modifies_loc_unused_in
(l: loc)
(h1 h2: HS.mem)
(l' : loc)
: Lemma
(requires (
modifies l h1 h2 /\
address_liveness_insensitive_locs `loc_includes` l /\
loc_unused_in h2 `loc_includes` l'
))
(ensures (loc_unused_in h1 `loc_includes` l'))
[SMTPatOr [
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h2 `loc_includes` l')];
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h1 `loc_includes` l')];
]]
/// Shorthand: freshness
let fresh_loc (l: loc) (h h' : HS.mem) : GTot Type0 =
loc_unused_in h `loc_includes` l /\
loc_not_unused_in h' `loc_includes` l
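(* Illustrative sketch (not part of the verified interface): an allocation
   routine is typically specified with
     fresh_loc (loc_buffer b) h0 h1 /\ modifies loc_none h0 h1 /\ live h1 b
   so that the new buffer is provably disjoint (via
   `unused_in_not_unused_in_disjoint_2`) from every location already allocated
   in `h0`. *)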
let ralloc_post_fresh_loc (#a:Type) (#rel:Preorder.preorder a) (i: HS.rid) (init:a) (m0: HS.mem)
(x: HST.mreference a rel{HST.is_eternal_region (HS.frameOf x)}) (m1: HS.mem) : Lemma
(requires (HST.ralloc_post i init m0 x m1))
(ensures (fresh_loc (loc_freed_mreference x) m0 m1))
[SMTPat (HST.ralloc_post i init m0 x m1)]
= ()
//AR: this is needed for liveness across fresh_frame
val fresh_frame_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (modifies loc_none h0 h1))
[SMTPat (HS.fresh_frame h0 h1)]
val popped_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.popped h0 h1))
(ensures (modifies (loc_region_only false (HS.get_tip h0)) h0 h1))
[SMTPat (HS.popped h0 h1)]
val modifies_remove_new_locs (l_fresh l_aux l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (fresh_loc l_fresh h1 h2 /\
modifies l_aux h1 h2 /\
l_goal `loc_includes` l_aux /\
modifies (loc_union l_fresh l_goal) h2 h3))
(ensures (modifies l_goal h1 h3))
[SMTPat (fresh_loc l_fresh h1 h2);
SMTPat (modifies l_aux h1 h2);
SMTPat (modifies l_goal h1 h3)]
(*
* AR: this lemma is framing the modifies clause across a fresh frame
* one way to do it would have been to reuse the lemma modifies_remove_new_locs,
* treating the fresh frame as another new location
* however, the way library is set up, loc_region in any form cannot be considered
* a fresh loc
* so, we have a special lemma for fresh_frame
*)
let modifies_remove_fresh_frame (h1 h2 h3:HS.mem) (l:loc)
: Lemma (requires (HS.fresh_frame h1 h2 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h2)) l) h2 h3))
(ensures (modifies l h1 h3))
[SMTPat (modifies l h1 h3); SMTPat (HS.fresh_frame h1 h2)]
= loc_regions_unused_in h1 (HS.mod_set (Set.singleton (HS.get_tip h2)));
modifies_only_not_unused_in l h1 h3
/// Legacy shorthands for disjointness and inclusion of buffers
///
let disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_disjoint (loc_buffer b1) (loc_buffer b2) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
b1: LowStar.Monotonic.Buffer.mbuffer a1 rrel1 rel1 ->
b2: LowStar.Monotonic.Buffer.mbuffer a2 rrel2 rel2
-> Prims.GTot Type0 | Prims.GTot | [
"sometrivial"
] | [] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"Prims.l_and",
"LowStar.Monotonic.Buffer.loc_includes",
"LowStar.Monotonic.Buffer.loc_buffer",
"Prims.l_iff",
"Prims.b2t",
"LowStar.Monotonic.Buffer.g_is_null"
] | [] | false | false | false | false | true | let includes
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
: GTot Type0 =
| loc_includes (loc_buffer b1) (loc_buffer b2) /\ (g_is_null b1 <==> g_is_null b2) | false |
LowStar.Monotonic.Buffer.fsti | LowStar.Monotonic.Buffer.modifies_liveness_insensitive_buffer_weak | val modifies_liveness_insensitive_buffer_weak
(l: loc)
(h h': HS.mem)
(#a: Type0)
(#rrel #rel: srel a)
(x: mbuffer a rrel rel)
: Lemma
(requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[
SMTPatOr
[
[SMTPat (live h x); SMTPat (modifies l h h')];
[SMTPat (live h' x); SMTPat (modifies l h h')]
]
] | val modifies_liveness_insensitive_buffer_weak
(l: loc)
(h h': HS.mem)
(#a: Type0)
(#rrel #rel: srel a)
(x: mbuffer a rrel rel)
: Lemma
(requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[
SMTPatOr
[
[SMTPat (live h x); SMTPat (modifies l h h')];
[SMTPat (live h' x); SMTPat (modifies l h h')]
]
] | let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 58,
"end_line": 1207,
"start_col": 0,
"start_line": 1196
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting sequence are related by rel
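(* Example (illustrative sketch, not part of the original interface): the
   trivial preorder below relates any two sequences, so it is compatible with
   itself on every slice -- both conjuncts above hold vacuously.
   LowStar.Buffer exposes the same definition under the name `trivial_preorder`. *)
let example_trivial_preorder (a:Type0) : srel a = fun _ _ -> True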
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` have the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer are the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
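(* Example (sketch, not part of the original interface): in ghost code, a
   prefix of `b` of length `n` can be carved out with `mgsub`, reusing the
   preorder `rel` of `b` itself; `len_gsub` and `as_seq_gsub` then describe
   its length and contents. *)
let example_gprefix (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel) (n:U32.t)
  :Ghost (mbuffer a rrel rel)
         (requires (U32.v n <= length b))
         (ensures (fun b' -> length b' == U32.v n))
  = mgsub rel b 0ul n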
/// Two live non-null buffers having the same region and address have
/// elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
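(* Example (sketch, not part of the original interface): thanks to the laws
   above and their SMT patterns, unions of locations normalize without any
   explicit lemma calls. *)
let example_loc_union_laws (l1 l2: loc)
  : Lemma (loc_union l1 (loc_union l1 loc_none) == l1 /\
           loc_union loc_none (loc_union l1 l2) == loc_union l1 l2)
  = ()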
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
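(* Example (sketch): `loc_region_only false r` covers only the locations lying
   directly in region `r`, whereas `loc_all_regions_from false r` additionally
   covers every region nested under `r`. A typical frame condition for a
   callee that only touches its own stack frame is, schematically,
     modifies (loc_all_regions_from false (HS.get_tip h1)) h1 h2
   where `h1` is the memory right after `push_frame`. *)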
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
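(* Example (sketch, not part of the original interface): a buffer's footprint
   is included in any union containing it. The proof is spelled out with
   explicit lemma calls here; the SMT patterns above usually discharge such
   goals automatically. *)
let example_loc_includes_union (#a:Type0) (#rrel #rel:srel a)
  (b1 b2:mbuffer a rrel rel)
  : Lemma (loc_includes (loc_union (loc_buffer b1) (loc_buffer b2)) (loc_buffer b2))
  = loc_includes_refl (loc_buffer b2);
    loc_includes_union_l (loc_buffer b1) (loc_buffer b2) (loc_buffer b2)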
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then so do their
/// corresponding sets of memory locations.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then so do their corresponding sets of memory
/// locations.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then so do their corresponding sets of memory locations
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
* Same as all_disjoint, retaining for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
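(* Example (sketch, not part of the original interface): a typical
   precondition over three buffers, written with the list-based utilities;
   both conjuncts reduce to plain conjunctions at typechecking time. *)
let example_three_buffers_pre (#a:Type0) (#rrel #rel:srel a)
  (h:HS.mem) (b1 b2 b3:mbuffer a rrel rel) : Type0
  = all_live h [buf b1; buf b2; buf b3] /\
    all_disjoint [loc_buffer b1; loc_buffer b2; loc_buffer b3]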
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
/// If the memory state does not change, then any set of memory locations is
/// trivially modified (and, in particular, the empty set ``loc_none``).
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
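(* Example (client-side sketch, not part of this interface): the primitives
   above combine into typical stateful signatures. A hypothetical function
   overwriting `dst` could be specified as

     val fill (#a:Type0) (#rrel #rel:srel a) (dst:mbuffer a rrel rel) (v:a)
       : HST.Stack unit
           (requires (fun h -> live h dst))
           (ensures (fun h0 _ h1 -> modifies (loc_buffer dst) h0 h1 /\ live h1 dst))

   (implementable only if `rel` tolerates the update). By
   `modifies_buffer_elim`, any buffer disjoint from `loc_buffer dst` then
   keeps both its liveness and its contents across a call to `fill`. *)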
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
l: LowStar.Monotonic.Buffer.loc ->
h: FStar.Monotonic.HyperStack.mem ->
h': FStar.Monotonic.HyperStack.mem ->
x: LowStar.Monotonic.Buffer.mbuffer a rrel rel
-> FStar.Pervasives.Lemma
(requires
LowStar.Monotonic.Buffer.modifies l h h' /\
LowStar.Monotonic.Buffer.loc_includes LowStar.Monotonic.Buffer.address_liveness_insensitive_locs
l /\ LowStar.Monotonic.Buffer.live h x)
(ensures LowStar.Monotonic.Buffer.live h' x)
[
SMTPatOr [
[
SMTPat (LowStar.Monotonic.Buffer.live h x);
SMTPat (LowStar.Monotonic.Buffer.modifies l h h')
];
[
SMTPat (LowStar.Monotonic.Buffer.live h' x);
SMTPat (LowStar.Monotonic.Buffer.modifies l h h')
]
]
] | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowStar.Monotonic.Buffer.loc",
"FStar.Monotonic.HyperStack.mem",
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Monotonic.Buffer.modifies_liveness_insensitive_buffer",
"LowStar.Monotonic.Buffer.loc_none",
"Prims.unit",
"Prims.l_and",
"LowStar.Monotonic.Buffer.modifies",
"LowStar.Monotonic.Buffer.loc_includes",
"LowStar.Monotonic.Buffer.address_liveness_insensitive_locs",
"LowStar.Monotonic.Buffer.live",
"Prims.squash",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat_or",
"Prims.list",
"FStar.Pervasives.smt_pat",
"Prims.Nil"
] | [] | true | false | true | false | false | let modifies_liveness_insensitive_buffer_weak
(l: loc)
(h h': HS.mem)
(#a: Type0)
(#rrel #rel: srel a)
(x: mbuffer a rrel rel)
: Lemma
(requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[
SMTPatOr
[
[SMTPat (live h x); SMTPat (modifies l h h')];
[SMTPat (live h' x); SMTPat (modifies l h h')]
]
] =
| modifies_liveness_insensitive_buffer loc_none l h h' x | false |
LowStar.Monotonic.Buffer.fsti | LowStar.Monotonic.Buffer.rrel_rel_always_compatible | val rrel_rel_always_compatible : rrel: LowStar.Monotonic.Buffer.srel a -> rel: LowStar.Monotonic.Buffer.srel a -> Prims.logical | let rrel_rel_always_compatible (#a:Type0) (rrel rel:srel a) =
forall (len:nat) (i:nat) (j:nat{i <= j /\ j <= len}). compatible_subseq_preorder len rrel i j rel | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 99,
"end_line": 1922,
"start_col": 0,
"start_line": 1921
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting sequence are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` have the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` return the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder to the sub-buffer (``sub_rel``),
/// provided it is compatible (in the sense of ``compatible_sub`` above)
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
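(* As an illustration of how ``mgsub`` interacts with ``as_seq``: a client could
   split a buffer into two halves at the parent preorder. This is a hypothetical
   sketch (``halves`` is not part of this interface):
   let halves (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel{length b == 8})
     : GTot (mbuffer a rrel rel * mbuffer a rrel rel)
     = (mgsub rel b 0ul 4ul, mgsub rel b 4ul 4ul)
   In any heap ``h`` where ``b`` is live, ``as_seq h (mgsub rel b 0ul 4ul)`` is
   ``Seq.slice (as_seq h b) 0 4`` by ``as_seq_gsub`` above. *)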
/// Two live non-null buffers having the same region and address have
/// their elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
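(* A hypothetical sketch of how clients use these algebraic laws, e.g. to
   reassociate unions when assembling a modifies clause (``loc_union_shuffle``
   is illustrative only, not part of this interface):
   let loc_union_shuffle (l1 l2 l3: loc)
     : Lemma (loc_union l1 (loc_union l2 l3) == loc_union (loc_union l1 l2) l3)
     = loc_union_assoc l1 l2 l3
*)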
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
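(* For instance, a caller that owns the union of two buffers' footprints also
   owns each component. A hypothetical sketch (``owns_left`` is not part of
   this interface):
   let owns_left (#a:Type0) (#rrel #rel:srel a) (b1 b2:mbuffer a rrel rel)
     : Lemma (loc_includes (loc_union (loc_buffer b1) (loc_buffer b2)) (loc_buffer b1))
     = loc_includes_union_l (loc_buffer b1) (loc_buffer b2) (loc_buffer b1)
*)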
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then so are their
/// corresponding sets of memory locations.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then so are their corresponding sets of memory
/// locations.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then so are their corresponding memory locations
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
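(* E.g., the two non-overlapping halves of a buffer have disjoint footprints.
   A hypothetical sketch, expected to be discharged automatically via the SMT
   pattern of ``loc_disjoint_gsub_buffer``:
   let halves_disjoint (#a:Type0) (#rrel #rel:srel a)
     (b:mbuffer a rrel rel{length b == 8})
     : Lemma (loc_disjoint (loc_buffer (mgsub rel b 0ul 4ul))
                           (loc_buffer (mgsub rel b 4ul 4ul)))
     = ()
*)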
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
 * Same as all_disjoint, retained for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
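(* These helpers are typically used to state preconditions over several buffers
   at once. A hypothetical sketch of such a precondition, for some buffers
   ``b1``, ``b2``, ``b3`` and heap ``h``:
     all_live h [buf b1; buf b2; buf b3] /\
     loc_pairwise_disjoint [loc_buffer b1; loc_buffer b2; loc_buffer b3]
*)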
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if,
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
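(* The framing principle in action: an operation that modifies only ``b1``
   preserves the liveness and contents of any disjoint buffer ``b2``. A
   hypothetical sketch, expected to follow from ``modifies_buffer_elim`` and
   its SMT patterns:
   let frame_example (#a:Type0) (#rrel #rel:srel a)
     (b1 b2:mbuffer a rrel rel) (h0 h1:HS.mem)
     : Lemma (requires (live h0 b2 /\
                        loc_disjoint (loc_buffer b1) (loc_buffer b2) /\
                        modifies (loc_buffer b1) h0 h1))
             (ensures  (live h1 b2 /\ as_seq h0 b2 == as_seq h1 b2))
     = ()
*)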
/// If the memory state does not change, then ``modifies s`` holds for any set
/// ``s`` of memory locations (in particular, for the empty set, ``loc_none``.)
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x
/// Modifies clauses are transitive. This lemma is the most general
/// one.
val modifies_trans
(s12: loc)
(h1 h2: HS.mem)
(s23: loc)
(h3: HS.mem)
: Lemma
(requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
(ensures (modifies (loc_union s12 s23) h1 h3))
let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
= modifies_trans l h1 h2 l_goal h3
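(* E.g., chaining two steps that each modify ``b`` yields a single modifies
   clause on ``b``. A hypothetical sketch relying on ``modifies_trans_linear``
   (``loc_buffer b`` trivially includes itself):
   let two_steps (#a:Type0) (#rrel #rel:srel a)
     (b:mbuffer a rrel rel) (h0 h1 h2:HS.mem)
     : Lemma (requires (modifies (loc_buffer b) h0 h1 /\ modifies (loc_buffer b) h1 h2))
             (ensures  (modifies (loc_buffer b) h0 h2))
     = ()
*)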
/// Regions that are not live can be removed from sets of memory
/// locations that are modified.
val modifies_only_live_regions
(rs: Set.set HS.rid)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_regions false rs) l) h h' /\
(forall r . Set.mem r rs ==> (~ (HS.live_region h r)))
))
(ensures (modifies l h h'))
/// As a consequence, fresh regions can be removed from modifies
/// clauses.
val no_upd_fresh_region: r:HS.rid -> l:loc -> h0:HS.mem -> h1:HS.mem -> Lemma
(requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1))
(ensures (modifies l h0 h1))
[SMTPat (HS.fresh_region r h0 h1); SMTPat (modifies l h0 h1)]
val new_region_modifies (m0: HS.mem) (r0: HS.rid) (col: option int) : Lemma
(requires (HST.is_eternal_region r0 /\ HS.live_region m0 r0 /\ (None? col \/ HS.is_heap_color (Some?.v col))))
(ensures (
let (_, m1) = HS.new_eternal_region m0 r0 col in
modifies loc_none m0 m1
))
[SMTPat (HS.new_eternal_region m0 r0 col)]
/// Stack discipline: any stack frame (and all its transitively
/// extending regions) that is pushed, modified and popped can be
/// removed from a modifies clause.
/// AR: 01/29/2019: Removing the smt pattern from this lemma.
/// Clients are no longer expected to call it explicitly,
/// if you are having to, please raise an issue.
val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
))
/// Compatibility lemmas to rescue modifies clauses specified in the
/// standard F* HyperStack library.
val modifies_loc_regions_intro
(rs: Set.set HS.rid)
(h1 h2: HS.mem)
: Lemma
(requires (HS.modifies rs h1 h2))
(ensures (modifies (loc_regions true rs) h1 h2))
val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2))
/// Modifies clauses for allocating a reference: nothing is
/// modified. (In particular, a modifies clause does not track
/// memory locations that are created.)
val modifies_ralloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(i: HS.rid)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel)
(h' : HS.mem)
: Lemma
(requires (HST.ralloc_post i init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.ralloc_post i init h x h')]
val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')]
/// Modifies clause for freeing a reference: the address is modified.
val modifies_free
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel { HS.is_mm r } )
(m: HS.mem { m `HS.contains` r } )
: Lemma
(modifies (loc_freed_mreference r) m (HS.free r m))
[SMTPat (HS.free r m)]
/// Another compatibility lemma
val modifies_none_modifies
(h1 h2: HS.mem)
: Lemma
(requires (HST.modifies_none h1 h2))
(ensures (modifies loc_none h1 h2))
[SMTPat (HST.modifies_none h1 h2)]
/// Compatibility with HS.upd
val modifies_upd
(#t: Type) (#pre: Preorder.preorder t)
(r: HS.mreference t pre)
(v: t)
(h: HS.mem)
: Lemma
(requires (HS.contains h r))
(ensures (modifies (loc_mreference r) h (HS.upd h r v)))
[SMTPat (HS.upd h r v)]
/// Introduction lemma for modifying loc_buffer_from_to
val modifies_loc_buffer_from_to_intro
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
/// A memory ``h`` does not contain address ``a`` in region ``r``, denoted
/// ``does_not_contain_addr h (r, a)``, only if either region ``r`` is
/// not live, or address ``a`` is unused in region ``r``.
(* BEGIN TODO: move to FStar.Monotonic.HyperStack *)
val does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: GTot Type0
val not_live_region_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (~ (HS.live_region h (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val unused_in_does_not_contain_addr
(h: HS.mem)
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r)))
val addr_unused_in_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val free_does_not_contain_addr
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
HS.is_mm r /\
m `HS.contains` r /\
fst x == HS.frameOf r /\
snd x == HS.as_addr r
))
(ensures (
HS.free r m `does_not_contain_addr` x
))
[SMTPat (HS.free r m `does_not_contain_addr` x)]
val does_not_contain_addr_elim
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
m `does_not_contain_addr` x /\
HS.frameOf r == fst x /\
HS.as_addr r == snd x
))
(ensures (~ (m `HS.contains` r)))
(** END TODO *)
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_live_addresses
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_addresses false r a) l) h h' /\
(forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x))
))
(ensures (modifies l h h'))
(* Generic way to ensure that a buffer just allocated is disjoint from
any other object, however the latter's liveness is defined. *)
val loc_not_unused_in (h: HS.mem) : GTot loc
val loc_unused_in (h: HS.mem) : GTot loc
(* Shortcut notations with more handy names *)
let loc_in (l: loc) (h: HS.mem) =
loc_not_unused_in h `loc_includes` l
let loc_not_in (l: loc) (h: HS.mem) =
loc_unused_in h `loc_includes` l
val loc_regions_unused_in (h: HS.mem) (rs: Set.set HS.rid) : Lemma
(requires (forall r . Set.mem r rs ==> (~ (HS.live_region h r))))
(ensures (loc_unused_in h `loc_includes` loc_regions false rs))
val loc_unused_in_not_unused_in_disjoint (h: HS.mem) : Lemma
(loc_disjoint (loc_unused_in h) (loc_not_unused_in h))
val not_live_region_loc_not_unused_in_disjoint
(h0: HS.mem)
(r: HS.rid)
: Lemma
(requires (~ (HS.live_region h0 r)))
(ensures (loc_disjoint (loc_region_only false r) (loc_not_unused_in h0)))
let fresh_frame_loc_not_unused_in_disjoint
(h0 h1: HS.mem)
: Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)]
= not_live_region_loc_not_unused_in_disjoint h0 (HS.get_tip h1)
val live_loc_not_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b))
(ensures (loc_not_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (live h b)]
val unused_in_loc_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (unused_in b h))
(ensures (loc_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (unused_in b h)]
val modifies_address_liveness_insensitive_unused_in
(h h' : HS.mem)
: Lemma
(requires (modifies (address_liveness_insensitive_locs) h h'))
(ensures (loc_not_unused_in h' `loc_includes` loc_not_unused_in h /\ loc_unused_in h `loc_includes` loc_unused_in h'))
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_not_unused_in
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (modifies (loc_union (loc_unused_in h) l) h h'))
(ensures (modifies l h h'))
val mreference_live_loc_not_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (h `HS.contains` r))
(ensures (loc_not_unused_in h `loc_includes` loc_freed_mreference r /\ loc_not_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.contains h r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_freed_mreference r)];
]]
val mreference_unused_in_loc_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (loc_unused_in h `loc_includes` loc_freed_mreference r /\ loc_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.unused_in r h)];
[SMTPat (loc_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_unused_in h `loc_includes` loc_freed_mreference r)];
]]
let unused_in_not_unused_in_disjoint_2
(l1 l2 l1' l2': loc)
(h: HS.mem)
: Lemma
(requires (loc_unused_in h `loc_includes` l1 /\ loc_not_unused_in h `loc_includes` l2 /\ l1 `loc_includes` l1' /\ l2 `loc_includes` l2' ))
(ensures (loc_disjoint l1' l2' ))
[SMTPat (loc_disjoint l1' l2'); SMTPat (loc_unused_in h `loc_includes` l1); SMTPat (loc_not_unused_in h `loc_includes` l2)]
= loc_includes_trans (loc_unused_in h) l1 l1' ;
loc_includes_trans (loc_not_unused_in h) l2 l2' ;
loc_unused_in_not_unused_in_disjoint h ;
loc_disjoint_includes (loc_unused_in h) (loc_not_unused_in h) l1' l2'
val modifies_loc_unused_in
(l: loc)
(h1 h2: HS.mem)
(l' : loc)
: Lemma
(requires (
modifies l h1 h2 /\
address_liveness_insensitive_locs `loc_includes` l /\
loc_unused_in h2 `loc_includes` l'
))
(ensures (loc_unused_in h1 `loc_includes` l'))
[SMTPatOr [
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h2 `loc_includes` l')];
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h1 `loc_includes` l')];
]]
/// Shorthand: freshness
let fresh_loc (l: loc) (h h' : HS.mem) : GTot Type0 =
loc_unused_in h `loc_includes` l /\
loc_not_unused_in h' `loc_includes` l
let ralloc_post_fresh_loc (#a:Type) (#rel:Preorder.preorder a) (i: HS.rid) (init:a) (m0: HS.mem)
(x: HST.mreference a rel{HST.is_eternal_region (HS.frameOf x)}) (m1: HS.mem) : Lemma
(requires (HST.ralloc_post i init m0 x m1))
(ensures (fresh_loc (loc_freed_mreference x) m0 m1))
[SMTPat (HST.ralloc_post i init m0 x m1)]
= ()
//AR: this is needed for liveness across fresh_frame
val fresh_frame_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (modifies loc_none h0 h1))
[SMTPat (HS.fresh_frame h0 h1)]
val popped_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.popped h0 h1))
(ensures (modifies (loc_region_only false (HS.get_tip h0)) h0 h1))
[SMTPat (HS.popped h0 h1)]
val modifies_remove_new_locs (l_fresh l_aux l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (fresh_loc l_fresh h1 h2 /\
modifies l_aux h1 h2 /\
l_goal `loc_includes` l_aux /\
modifies (loc_union l_fresh l_goal) h2 h3))
(ensures (modifies l_goal h1 h3))
[SMTPat (fresh_loc l_fresh h1 h2);
SMTPat (modifies l_aux h1 h2);
SMTPat (modifies l_goal h1 h3)]
(*
* AR: this lemma is framing the modifies clause across a fresh frame
* one way to do it would have been to reuse the lemma modifies_remove_new_locs,
* treating the fresh frame as another new location
 * however, the way the library is set up, loc_region in any form cannot be
 * considered a fresh loc,
 * so we have a special lemma for fresh_frame
*)
let modifies_remove_fresh_frame (h1 h2 h3:HS.mem) (l:loc)
: Lemma (requires (HS.fresh_frame h1 h2 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h2)) l) h2 h3))
(ensures (modifies l h1 h3))
[SMTPat (modifies l h1 h3); SMTPat (HS.fresh_frame h1 h2)]
= loc_regions_unused_in h1 (HS.mod_set (Set.singleton (HS.get_tip h2)));
modifies_only_not_unused_in l h1 h3
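(* The typical client shape that benefits from the lemmas above is a ``Stack``
   function that pushes a frame, allocates temporaries in it, updates ``b``,
   and pops the frame, while advertising only ``modifies (loc_buffer b)`` to
   its caller. A hypothetical sketch:
     let f (b:mbuffer a rrel rel)
       : HST.Stack unit (requires ...) (ensures fun h0 _ h1 -> modifies (loc_buffer b) h0 h1)
       = HST.push_frame ();
         ... allocate and use temporaries on the new frame, write to b ...
         HST.pop_frame ()
   The fresh frame and its temporaries are dropped from the modifies clause by
   ``modifies_fresh_frame_popped`` and ``modifies_remove_fresh_frame``. *)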
/// Legacy shorthands for disjointness and inclusion of buffers
///
let disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_disjoint (loc_buffer b1) (loc_buffer b2)
let includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_includes (loc_buffer b1) (loc_buffer b2) /\
(g_is_null b1 <==> g_is_null b2)
val disjoint_neq (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (disjoint b1 b2 /\ U32.v (len b1) > 0))
(ensures (~(b1 === b2)))
val empty_disjoint (#t1 #t2: Type) (#rrel1 #rel1: srel t1) (#rrel2 #rel2: srel t2) (b1: mbuffer t1 rrel1 rel1) (b2: mbuffer t2 rrel2 rel2) : Lemma
(requires (length b1 == 0))
(ensures (disjoint b1 b2))
(*
/// The liveness of a sub-buffer entails from the liveness
/// of its enclosing buffer.
val includes_live (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (live h larger ==> live h smaller))
[SMTPatOr [
[SMTPat (includes larger smaller); SMTPat (live h larger)];
[SMTPat (includes larger smaller); SMTPat (live h smaller)];
]]
*)
val includes_frameOf_as_addr (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (g_is_null larger == g_is_null smaller /\ frameOf larger == frameOf smaller /\ as_addr larger == as_addr smaller))
[SMTPat (larger `includes` smaller)]
///
/// Useful shorthands for pointers, or maybe-null pointers
inline_for_extraction
type mpointer (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{length b == 1}
inline_for_extraction
type mpointer_or_null (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{if g_is_null b then True else length b == 1}
unfold
let deref (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (x:mpointer a rrel rel) =
get h x 0
/// Two pointers having different contents are disjoint. This is valid
/// only for pointers, i.e. buffers of size 1.
val pointer_distinct_sel_disjoint
(#a:Type0) (#rrel1 #rrel2 #rel1 #rel2:srel a)
(b1:mpointer a rrel1 rel1)
(b2:mpointer a rrel2 rel2)
(h:HS.mem)
:Lemma (requires (live h b1 /\ live h b2 /\ get h b1 0 =!= get h b2 0))
(ensures (disjoint b1 b2))
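(* For example, two live pointers observed to hold different values in some
   heap cannot alias. A hypothetical sketch (``observed_distinct`` is not part
   of this interface):
   let observed_distinct (#a:Type0) (#rrel1 #rrel2 #rel1 #rel2:srel a)
     (p1:mpointer a rrel1 rel1) (p2:mpointer a rrel2 rel2) (h:HS.mem)
     : Lemma (requires (live h p1 /\ live h p2 /\ deref h p1 =!= deref h p2))
             (ensures  (disjoint p1 p2))
     = pointer_distinct_sel_disjoint p1 p2 h
*)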
/// The following stateful operations on buffers do not change the
/// memory, but, as required by the C standard, they all require the
/// buffer in question to be live.
/// The nullity test, ``is_null b``, which KaRaMeL compiles to C as ``b == NULL``.
val is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack bool (requires (fun h -> live h b))
(ensures (fun h y h' -> h == h' /\ y == g_is_null b))
/// ``sub b i len`` constructs the sub-buffer of ``b`` starting from
/// offset ``i`` with length ``len``. KaRaMeL extracts this operation as
/// ``b + i`` (or, equivalently, ``&b[i]``.)
val msub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t) (len:Ghost.erased U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i + U32.v (Ghost.reveal len) <= length b /\ compatible_sub b i (Ghost.reveal len) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (Ghost.reveal len)))
/// ``offset b i`` constructs the tail of the buffer ``b`` starting from
/// offset ``i``, i.e. the sub-buffer of ``b`` starting from offset ``i``
/// with length ``U32.sub (len b) i``. KaRaMeL compiles it as ``b + i`` or
/// ``&b[i]``.
///
/// This stateful operation cannot be derived from ``sub``, because the
/// length cannot be computed outside of proofs.
val moffset (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i <= length b /\ compatible_sub b i (U32.sub (len b) i) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (U32.sub (len b) i)))
// goffset
/// ``index b i`` reads the value of ``b`` at offset ``i`` from memory and
/// returns it. KaRaMeL compiles it as b[i].
val index (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (i:U32.t)
:HST.Stack a (requires (fun h -> live h b /\ U32.v i < length b))
(ensures (fun h y h' -> h == h' /\ y == Seq.index (as_seq h b) (U32.v i)))
/// The following stateful operations on buffers modify the memory,
/// and, as usual, require the liveness of the buffer.
/// ``g_upd_seq b s h`` updates the entire buffer `b`'s contents in
/// heap `h` to correspond to the sequence `s`
val g_upd_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (s:Seq.lseq a (length b))
(h:HS.mem{live h b})
:GTot HS.mem
val lemma_g_upd_with_same_seq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b)) (ensures (g_upd_seq b (as_seq h b) h == h))
/// A lemma specifying `g_upd_seq` in terms of its effect on the
/// buffer's underlying sequence
val g_upd_seq_as_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(s:Seq.lseq a (length b))
(h:HS.mem{live h b})
: Lemma (let h' = g_upd_seq b s h in
(Seq.length s > 0 ==> not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
HST.equal_domains h h' /\
as_seq h' b == s)
/// ``g_upd b i v h`` updates the buffer `b` in heap `h` at location
/// `i` writing ``v`` there. This is the spec analog of the stateful
/// update `upd` below.
let g_upd (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: GTot HS.mem
= g_upd_seq b (Seq.upd (as_seq h b) i v) h
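(* Spec-level reasoning sketch: after ``g_upd b i v h``, the contents of ``b``
   are the old contents with index ``i`` replaced by ``v``, i.e.
     as_seq (g_upd b i v h) b == Seq.upd (as_seq h b) i v
   a direct consequence of ``g_upd_seq_as_seq`` above; in particular
   ``Seq.index (as_seq (g_upd b i v h) b) i == v``. *)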
val g_upd_modifies_strong (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: Lemma (modifies (loc_buffer_from_to b (U32.uint_to_t i) (U32.uint_to_t (i + 1))) h (g_upd b i v h))
/// ``upd b i v`` writes ``v`` to the memory, at offset ``i`` of
/// buffer ``b``. KaRaMeL compiles it as ``b[i] = v``.
val upd'
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
:HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> h' == g_upd b (U32.v i) v h))
inline_for_extraction
let upd
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
: HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> (not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
as_seq h' b == Seq.upd (as_seq h b) (U32.v i) v))
= let h = HST.get () in
upd' b i v;
g_upd_seq_as_seq b (Seq.upd (as_seq h b) (U32.v i) v) h
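(* Reading and writing an element through the stateful API. A hypothetical
   sketch, assuming the buffer's preorder ``rel`` accepts the written sequence
   (e.g. a trivial preorder relating all sequences):
     let x = index b 0ul in
     upd b 0ul x
   The precondition of ``upd`` requires ``rel (as_seq h b) (Seq.upd (as_seq h b) 0 x)``,
   which holds here because the written sequence is (extensionally) the original one. *)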
(* FIXME: Comment on `recall` *)
val recallable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val region_lifetime_buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Type0
(*
* A functoriality lemma
*) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | rrel: LowStar.Monotonic.Buffer.srel a -> rel: LowStar.Monotonic.Buffer.srel a -> Prims.logical | Prims.Tot | [
"total"
] | [] | [
"LowStar.Monotonic.Buffer.srel",
"Prims.l_Forall",
"Prims.nat",
"Prims.l_and",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"LowStar.Monotonic.Buffer.compatible_subseq_preorder",
"Prims.logical"
] | [] | false | false | false | true | true | let rrel_rel_always_compatible (#a: Type0) (rrel rel: srel a) =
| forall (len: nat) (i: nat) (j: nat{i <= j /\ j <= len}). compatible_subseq_preorder len rrel i j rel | false |
|
LowStar.Monotonic.Buffer.fsti | LowStar.Monotonic.Buffer.alloc_post_mem_common | val alloc_post_mem_common : b: LowStar.Monotonic.Buffer.mbuffer a rrel rel ->
h0: FStar.Monotonic.HyperStack.mem ->
h1: FStar.Monotonic.HyperStack.mem ->
s: FStar.Seq.Base.seq a
-> Prims.logical | let alloc_post_mem_common (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h0 h1:HS.mem) (s:Seq.seq a)
= live h1 b /\
unused_in b h0 /\
Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\
(HS.get_tip h1) == (HS.get_tip h0) /\
modifies loc_none h0 h1 /\
as_seq h1 b == s | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 20,
"end_line": 2098,
"start_col": 0,
"start_line": 2091
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining the sub-buffer-related API, we need to define the notion of "compatibility".
///
/// Sub-buffers can be taken at a different preorder than their parent buffer,
/// but we need to ensure that the changes to the sub-buffer are compatible with the
/// preorder of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
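(* For example, the trivial preorder is compatible with itself on any range.
   An illustrative sketch (hypothetical name, not part of the library):

     let trivial_srel (a:Type0) : srel a = fun _ _ -> True

   With `rel = trivial_srel a`, `compatible_sub b i len (trivial_srel a)`
   holds because both implications in `compatible_subseq_preorder` conclude
   with `True`. A non-trivial preorder (say, "the sequence only grows
   pointwise") needs a genuine proof that slicing and splicing preserve it
   before it can be used as a `sub_rel`. *)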
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible.
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers having the same region and address have
/// their elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
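(* Taken together, these laws let the SMT solver normalize unions of
   locations. A sanity-check sketch (not part of the library):

     let loc_union_norm_ex (l1 l2:loc)
       : Lemma (loc_union (loc_union l1 l2) (loc_union l2 loc_none) == loc_union l1 l2)
       = ()  // loc_union_loc_none_r collapses the right operand to l2,
             // then loc_union_idem_2 removes the duplicate

   so a clause such as `modifies (loc_union l l) h h'` is interchangeable
   with `modifies l h h'`. *)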
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
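(* A small derived fact, as a sketch (hypothetical helper, not part of the
   library): a union that mentions a buffer includes that buffer's location.

     let loc_includes_union_buffer_ex (#a:Type0) (#rrel #rel:srel a)
       (b:mbuffer a rrel rel) (l:loc)
       : Lemma (loc_includes (loc_union (loc_buffer b) l) (loc_buffer b))
       = loc_includes_refl (loc_buffer b);
         loc_includes_union_l (loc_buffer b) l (loc_buffer b)

   This is the usual way a caller weakens `modifies (loc_buffer b)` into a
   larger footprint, via `modifies_loc_includes` below. *)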
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then the same inclusion holds
/// between their corresponding sets of memory locations.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then the same inclusion holds between their
/// corresponding sets of memory locations.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then the same inclusion holds between their corresponding sets of memory locations.
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
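(* Sketch of the common "split a buffer in two" pattern (not part of the
   library): the two halves carved out with `mgsub` at offsets 0 and n are
   disjoint, so they can be updated independently.

     let halves_disjoint (#a:Type0) (#rrel #rel:srel a)
       (b:mbuffer a rrel rel) (n:U32.t{U32.v n + U32.v n <= length b})
       : Lemma (loc_disjoint (loc_buffer (mgsub rel b 0ul n))
                             (loc_buffer (mgsub rel b n n)))
       = ()  // discharged by the SMT patterns on loc_disjoint_gsub_buffer
*)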
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
* Same as all_disjoint; retained for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
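(* These list-based helpers are mostly used in specifications of functions
   taking several buffers. A sketch of a typical precondition, for
   hypothetical buffers b1, b2, b3:

     all_live h [buf b1; buf b2; buf b3] /\
     all_disjoint [loc_buffer b1; loc_buffer b2; loc_buffer b3]

   and of a combined footprint built from a list:

     loc_union_l [loc_buffer b1; loc_buffer b2]

   Being `unfold` and wrapped in `BigOps.normal`, these conjunctions reduce
   away at typechecking time. *)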
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if,
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
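(* Framing sketch (not part of the library): a modification confined to `b1`
   leaves a disjoint, live `b2` untouched.

     let frame_buffer_ex (#a:Type0) (#rrel1 #rel1 #rrel2 #rel2:srel a)
       (b1:mbuffer a rrel1 rel1) (b2:mbuffer a rrel2 rel2) (h h':HS.mem)
       : Lemma
         (requires (live h b2 /\
                    loc_disjoint (loc_buffer b1) (loc_buffer b2) /\
                    modifies (loc_buffer b1) h h'))
         (ensures  (live h' b2 /\ as_seq h' b2 == as_seq h b2))
       = ()  // follows from modifies_buffer_elim (and loc_disjoint_sym')
             // via their SMT patterns
*)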
/// If the memory state does not change, then any set of memory locations is
/// trivially modified (and, in particular, so is the empty set, ``loc_none``).
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x
/// Modifies clauses are transitive. This lemma is the most general
/// one.
val modifies_trans
(s12: loc)
(h1 h2: HS.mem)
(s23: loc)
(h3: HS.mem)
: Lemma
(requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
(ensures (modifies (loc_union s12 s23) h1 h3))
let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
= modifies_trans l h1 h2 l_goal h3
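(* Chaining sketch (not part of the library): two successive modifications
   of the same buffer compose into a single footprint.

     let modifies_chain_ex (#a:Type0) (#rrel #rel:srel a)
       (b:mbuffer a rrel rel) (h1 h2 h3:HS.mem)
       : Lemma
         (requires (modifies (loc_buffer b) h1 h2 /\ modifies (loc_buffer b) h2 h3))
         (ensures  (modifies (loc_buffer b) h1 h3))
       = modifies_trans (loc_buffer b) h1 h2 (loc_buffer b) h3
         // yields modifies (loc_union (loc_buffer b) (loc_buffer b)) h1 h3,
         // which loc_union_idem collapses to the stated footprint
*)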
/// Regions that are not live can be removed from sets of memory
/// locations that are modified.
val modifies_only_live_regions
(rs: Set.set HS.rid)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_regions false rs) l) h h' /\
(forall r . Set.mem r rs ==> (~ (HS.live_region h r)))
))
(ensures (modifies l h h'))
/// As a consequence, fresh regions can be removed from modifies
/// clauses.
val no_upd_fresh_region: r:HS.rid -> l:loc -> h0:HS.mem -> h1:HS.mem -> Lemma
(requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1))
(ensures (modifies l h0 h1))
[SMTPat (HS.fresh_region r h0 h1); SMTPat (modifies l h0 h1)]
val new_region_modifies (m0: HS.mem) (r0: HS.rid) (col: option int) : Lemma
(requires (HST.is_eternal_region r0 /\ HS.live_region m0 r0 /\ (None? col \/ HS.is_heap_color (Some?.v col))))
(ensures (
let (_, m1) = HS.new_eternal_region m0 r0 col in
modifies loc_none m0 m1
))
[SMTPat (HS.new_eternal_region m0 r0 col)]
/// Stack discipline: any stack frame (and all its transitively
/// extending regions) that is pushed, modified and popped can be
/// removed from a modifies clause.
/// AR: 01/29/2019: Removing the smt pattern from this lemma.
/// Clients are no longer expected to call it explicitly;
/// if you find yourself having to, please raise an issue.
val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
))
/// Compatibility lemmas to rescue modifies clauses specified in the
/// standard F* HyperStack library.
val modifies_loc_regions_intro
(rs: Set.set HS.rid)
(h1 h2: HS.mem)
: Lemma
(requires (HS.modifies rs h1 h2))
(ensures (modifies (loc_regions true rs) h1 h2))
val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2))
/// Modifies clauses for allocating a reference: nothing is
/// modified. (In particular, a modifies clause does not track
/// memory locations that are created.)
val modifies_ralloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(i: HS.rid)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel)
(h' : HS.mem)
: Lemma
(requires (HST.ralloc_post i init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.ralloc_post i init h x h')]
val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')]
/// Modifies clause for freeing a reference: the address is modified.
val modifies_free
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel { HS.is_mm r } )
(m: HS.mem { m `HS.contains` r } )
: Lemma
(modifies (loc_freed_mreference r) m (HS.free r m))
[SMTPat (HS.free r m)]
/// Another compatibility lemma
val modifies_none_modifies
(h1 h2: HS.mem)
: Lemma
(requires (HST.modifies_none h1 h2))
(ensures (modifies loc_none h1 h2))
[SMTPat (HST.modifies_none h1 h2)]
/// Compatibility with HS.upd
val modifies_upd
(#t: Type) (#pre: Preorder.preorder t)
(r: HS.mreference t pre)
(v: t)
(h: HS.mem)
: Lemma
(requires (HS.contains h r))
(ensures (modifies (loc_mreference r) h (HS.upd h r v)))
[SMTPat (HS.upd h r v)]
/// Introduction lemma for modifying loc_buffer_from_to
val modifies_loc_buffer_from_to_intro
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
/// A memory ``h`` does not contain address ``a`` in region ``r``, denoted
/// ``does_not_contain_addr h (r, a)``, only if either region ``r`` is
/// not live, or address ``a`` is unused in region ``r``.
(* BEGIN TODO: move to FStar.Monotonic.HyperStack *)
val does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: GTot Type0
val not_live_region_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (~ (HS.live_region h (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val unused_in_does_not_contain_addr
(h: HS.mem)
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r)))
val addr_unused_in_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val free_does_not_contain_addr
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
HS.is_mm r /\
m `HS.contains` r /\
fst x == HS.frameOf r /\
snd x == HS.as_addr r
))
(ensures (
HS.free r m `does_not_contain_addr` x
))
[SMTPat (HS.free r m `does_not_contain_addr` x)]
val does_not_contain_addr_elim
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
m `does_not_contain_addr` x /\
HS.frameOf r == fst x /\
HS.as_addr r == snd x
))
(ensures (~ (m `HS.contains` r)))
(** END TODO *)
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_live_addresses
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_addresses false r a) l) h h' /\
(forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x))
))
(ensures (modifies l h h'))
(* Generic way to ensure that a buffer just allocated is disjoint from
any other object, regardless of how the latter's liveness is defined. *)
val loc_not_unused_in (h: HS.mem) : GTot loc
val loc_unused_in (h: HS.mem) : GTot loc
(* Shortcut notations with more handy names *)
let loc_in (l: loc) (h: HS.mem) =
loc_not_unused_in h `loc_includes` l
let loc_not_in (l: loc) (h: HS.mem) =
loc_unused_in h `loc_includes` l
val loc_regions_unused_in (h: HS.mem) (rs: Set.set HS.rid) : Lemma
(requires (forall r . Set.mem r rs ==> (~ (HS.live_region h r))))
(ensures (loc_unused_in h `loc_includes` loc_regions false rs))
val loc_unused_in_not_unused_in_disjoint (h: HS.mem) : Lemma
(loc_disjoint (loc_unused_in h) (loc_not_unused_in h))
val not_live_region_loc_not_unused_in_disjoint
(h0: HS.mem)
(r: HS.rid)
: Lemma
(requires (~ (HS.live_region h0 r)))
(ensures (loc_disjoint (loc_region_only false r) (loc_not_unused_in h0)))
let fresh_frame_loc_not_unused_in_disjoint
(h0 h1: HS.mem)
: Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)]
= not_live_region_loc_not_unused_in_disjoint h0 (HS.get_tip h1)
val live_loc_not_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b))
(ensures (loc_not_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (live h b)]
val unused_in_loc_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (unused_in b h))
(ensures (loc_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (unused_in b h)]
val modifies_address_liveness_insensitive_unused_in
(h h' : HS.mem)
: Lemma
(requires (modifies (address_liveness_insensitive_locs) h h'))
(ensures (loc_not_unused_in h' `loc_includes` loc_not_unused_in h /\ loc_unused_in h `loc_includes` loc_unused_in h'))
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_not_unused_in
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (modifies (loc_union (loc_unused_in h) l) h h'))
(ensures (modifies l h h'))
val mreference_live_loc_not_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (h `HS.contains` r))
(ensures (loc_not_unused_in h `loc_includes` loc_freed_mreference r /\ loc_not_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.contains h r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_freed_mreference r)];
]]
val mreference_unused_in_loc_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (loc_unused_in h `loc_includes` loc_freed_mreference r /\ loc_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.unused_in r h)];
[SMTPat (loc_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_unused_in h `loc_includes` loc_freed_mreference r)];
]]
let unused_in_not_unused_in_disjoint_2
(l1 l2 l1' l2': loc)
(h: HS.mem)
: Lemma
(requires (loc_unused_in h `loc_includes` l1 /\ loc_not_unused_in h `loc_includes` l2 /\ l1 `loc_includes` l1' /\ l2 `loc_includes` l2' ))
(ensures (loc_disjoint l1' l2' ))
[SMTPat (loc_disjoint l1' l2'); SMTPat (loc_unused_in h `loc_includes` l1); SMTPat (loc_not_unused_in h `loc_includes` l2)]
= loc_includes_trans (loc_unused_in h) l1 l1' ;
loc_includes_trans (loc_not_unused_in h) l2 l2' ;
loc_unused_in_not_unused_in_disjoint h ;
loc_disjoint_includes (loc_unused_in h) (loc_not_unused_in h) l1' l2'
val modifies_loc_unused_in
(l: loc)
(h1 h2: HS.mem)
(l' : loc)
: Lemma
(requires (
modifies l h1 h2 /\
address_liveness_insensitive_locs `loc_includes` l /\
loc_unused_in h2 `loc_includes` l'
))
(ensures (loc_unused_in h1 `loc_includes` l'))
[SMTPatOr [
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h2 `loc_includes` l')];
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h1 `loc_includes` l')];
]]
/// Shorthand: freshness
let fresh_loc (l: loc) (h h' : HS.mem) : GTot Type0 =
loc_unused_in h `loc_includes` l /\
loc_not_unused_in h' `loc_includes` l
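(* A hedged sketch: fresh_loc typically appears in allocation postconditions,
   e.g. (buffer name illustrative):

     (ensures fun h0 b h1 ->
        fresh_loc (loc_buffer b) h0 h1 /\    // b was unused in h0, allocated in h1
        modifies loc_none h0 h1 /\
        live h1 b)
*)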
let ralloc_post_fresh_loc (#a:Type) (#rel:Preorder.preorder a) (i: HS.rid) (init:a) (m0: HS.mem)
(x: HST.mreference a rel{HST.is_eternal_region (HS.frameOf x)}) (m1: HS.mem) : Lemma
(requires (HST.ralloc_post i init m0 x m1))
(ensures (fresh_loc (loc_freed_mreference x) m0 m1))
[SMTPat (HST.ralloc_post i init m0 x m1)]
= ()
//AR: this is needed for liveness across fresh_frame
val fresh_frame_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (modifies loc_none h0 h1))
[SMTPat (HS.fresh_frame h0 h1)]
val popped_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.popped h0 h1))
(ensures (modifies (loc_region_only false (HS.get_tip h0)) h0 h1))
[SMTPat (HS.popped h0 h1)]
val modifies_remove_new_locs (l_fresh l_aux l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (fresh_loc l_fresh h1 h2 /\
modifies l_aux h1 h2 /\
l_goal `loc_includes` l_aux /\
modifies (loc_union l_fresh l_goal) h2 h3))
(ensures (modifies l_goal h1 h3))
[SMTPat (fresh_loc l_fresh h1 h2);
SMTPat (modifies l_aux h1 h2);
SMTPat (modifies l_goal h1 h3)]
(*
 * AR: this lemma frames the modifies clause across a fresh frame.
 * One way to do it would have been to reuse the lemma modifies_remove_new_locs,
 * treating the fresh frame as another new location.
 * However, the way the library is set up, loc_region in any form cannot be
 * considered a fresh loc, so we have a special lemma for fresh_frame.
*)
let modifies_remove_fresh_frame (h1 h2 h3:HS.mem) (l:loc)
: Lemma (requires (HS.fresh_frame h1 h2 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h2)) l) h2 h3))
(ensures (modifies l h1 h3))
[SMTPat (modifies l h1 h3); SMTPat (HS.fresh_frame h1 h2)]
= loc_regions_unused_in h1 (HS.mod_set (Set.singleton (HS.get_tip h2)));
modifies_only_not_unused_in l h1 h3
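(* A hedged usage sketch (function name illustrative): with the lemmas above in
   scope, a caller that pushes a frame, works only on data allocated in that
   frame, and pops it can typically conclude that it modified nothing visible:

     let with_frame () : HST.Stack unit
       (requires fun _ -> True)
       (ensures  fun h0 _ h1 -> modifies loc_none h0 h1)
       = HST.push_frame ();
         (* ... allocate and use buffers in the new tip frame ... *)
         HST.pop_frame ()
*)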
/// Legacy shorthands for disjointness and inclusion of buffers
///
let disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_disjoint (loc_buffer b1) (loc_buffer b2)
let includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_includes (loc_buffer b1) (loc_buffer b2) /\
(g_is_null b1 <==> g_is_null b2)
val disjoint_neq (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (disjoint b1 b2 /\ U32.v (len b1) > 0))
(ensures (~(b1 === b2)))
val empty_disjoint (#t1 #t2: Type) (#rrel1 #rel1: srel t1) (#rrel2 #rel2: srel t2) (b1: mbuffer t1 rrel1 rel1) (b2: mbuffer t2 rrel2 rel2) : Lemma
(requires (length b1 == 0))
(ensures (disjoint b1 b2))
(*
/// The liveness of a sub-buffer follows from the liveness
/// of its enclosing buffer.
val includes_live (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (live h larger ==> live h smaller))
[SMTPatOr [
[SMTPat (includes larger smaller); SMTPat (live h larger)];
[SMTPat (includes larger smaller); SMTPat (live h smaller)];
]]
*)
val includes_frameOf_as_addr (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (g_is_null larger == g_is_null smaller /\ frameOf larger == frameOf smaller /\ as_addr larger == as_addr smaller))
[SMTPat (larger `includes` smaller)]
///
/// Useful shorthands for pointers, or maybe-null pointers
inline_for_extraction
type mpointer (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{length b == 1}
inline_for_extraction
type mpointer_or_null (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{if g_is_null b then True else length b == 1}
unfold
let deref (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (x:mpointer a rrel rel) =
get h x 0
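(* A hedged sketch: specifications over a single-cell pointer read more
   naturally with `deref` than with `get _ _ 0`; e.g. a hypothetical setter:

     val set_ptr (#a:Type0) (#rrel #rel:srel a) (p:mpointer a rrel rel) (v:a)
       : HST.Stack unit
         (requires fun h -> live h p /\ rel (as_seq h p) (Seq.upd (as_seq h p) 0 v))
         (ensures  fun h0 _ h1 -> modifies (loc_buffer p) h0 h1 /\ deref h1 p == v)
*)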
/// Two pointers having different contents are disjoint. This is valid
/// only for pointers, i.e. buffers of size 1.
val pointer_distinct_sel_disjoint
(#a:Type0) (#rrel1 #rrel2 #rel1 #rel2:srel a)
(b1:mpointer a rrel1 rel1)
(b2:mpointer a rrel2 rel2)
(h:HS.mem)
:Lemma (requires (live h b1 /\ live h b2 /\ get h b1 0 =!= get h b2 0))
(ensures (disjoint b1 b2))
/// The following stateful operations on buffers do not change the
/// memory, but, as required by the C standard, they all require the
/// buffer in question to be live.
/// The nullity test, ``is_null b``, which KaRaMeL compiles to C as ``b == NULL``.
val is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack bool (requires (fun h -> live h b))
(ensures (fun h y h' -> h == h' /\ y == g_is_null b))
/// ``sub b i len`` constructs the sub-buffer of ``b`` starting from
/// offset ``i`` with length ``len``. KaRaMeL extracts this operation as
/// ``b + i`` (or, equivalently, ``&b[i]``.)
val msub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t) (len:Ghost.erased U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i + U32.v (Ghost.reveal len) <= length b /\ compatible_sub b i (Ghost.reveal len) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (Ghost.reveal len)))
/// ``offset b i`` constructs the tail of the buffer ``b`` starting from
/// offset ``i``, i.e. the sub-buffer of ``b`` starting from offset ``i``
/// with length ``U32.sub (len b) i``. KaRaMeL compiles it as ``b + i`` or
/// ``&b[i]``.
///
/// This stateful operation cannot be derived from ``sub``, because the
/// length cannot be computed outside of proofs.
val moffset (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i <= length b /\ compatible_sub b i (U32.sub (len b) i) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (U32.sub (len b) i)))
// goffset
/// ``index b i`` reads the value of ``b`` at offset ``i`` from memory and
/// returns it. KaRaMeL compiles it as ``b[i]``.
val index (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (i:U32.t)
:HST.Stack a (requires (fun h -> live h b /\ U32.v i < length b))
(ensures (fun h y h' -> h == h' /\ y == Seq.index (as_seq h b) (U32.v i)))
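(* A hedged usage sketch for `index` (function name illustrative):

     let first_elem (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
       : HST.Stack a (requires fun h -> live h b /\ length b > 0)
                     (ensures  fun h0 x h1 -> h0 == h1 /\ x == get h0 b 0)
       = index b 0ul
*)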
/// The following stateful operations on buffers modify the memory,
/// and, as usual, require the liveness of the buffer.
/// ``g_upd_seq b s h`` updates the entire buffer `b`'s contents in
/// heap `h` to correspond to the sequence `s`
val g_upd_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (s:Seq.lseq a (length b))
(h:HS.mem{live h b})
:GTot HS.mem
val lemma_g_upd_with_same_seq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b)) (ensures (g_upd_seq b (as_seq h b) h == h))
/// A lemma specifying `g_upd_seq` in terms of its effect on the
/// buffer's underlying sequence
val g_upd_seq_as_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(s:Seq.lseq a (length b))
(h:HS.mem{live h b})
: Lemma (let h' = g_upd_seq b s h in
(Seq.length s > 0 ==> not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
HST.equal_domains h h' /\
as_seq h' b == s)
/// ``g_upd b i v h`` updates the buffer `b` in heap `h` at location
/// `i` writing ``v`` there. This is the spec analog of the stateful
/// update `upd` below.
let g_upd (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: GTot HS.mem
= g_upd_seq b (Seq.upd (as_seq h b) i v) h
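(* A hedged note: combining `g_upd` with `g_upd_seq_as_seq` above gives the
   expected pointwise reading, roughly:

     as_seq (g_upd b i v h) b == Seq.upd (as_seq h b) i v
*)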
val g_upd_modifies_strong (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: Lemma (modifies (loc_buffer_from_to b (U32.uint_to_t i) (U32.uint_to_t (i + 1))) h (g_upd b i v h))
/// ``upd b i v`` writes ``v`` to the memory, at offset ``i`` of
/// buffer ``b``. KaRaMeL compiles it as ``b[i] = v``.
val upd'
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
:HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> h' == g_upd b (U32.v i) v h))
inline_for_extraction
let upd
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
: HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> (not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
as_seq h' b == Seq.upd (as_seq h b) (U32.v i) v))
= let h = HST.get () in
upd' b i v;
g_upd_seq_as_seq b (Seq.upd (as_seq h b) (U32.v i) v) h
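(* A hedged usage sketch for `upd` (function name illustrative): the caller
   must show that the write is permitted by the buffer's preorder `rel`; with a
   trivial preorder this side condition is immediate.

     let set_first (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (v:a)
       : HST.Stack unit
         (requires fun h -> live h b /\ length b > 0 /\
                          rel (as_seq h b) (Seq.upd (as_seq h b) 0 v))
         (ensures  fun h0 _ h1 -> modifies (loc_buffer b) h0 h1 /\
                                as_seq h1 b == Seq.upd (as_seq h0 b) 0 v)
       = upd b 0ul v
*)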
(* FIXME: Comment on `recall` *)
val recallable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val region_lifetime_buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Type0
(*
* A functoriality lemma
*)
unfold
let rrel_rel_always_compatible (#a:Type0) (rrel rel:srel a) =
forall (len:nat) (i:nat) (j:nat{i <= j /\ j <= len}). compatible_subseq_preorder len rrel i j rel
val region_lifetime_sub (#a:Type0) (#rrel #rel #subrel:srel a)
(b0:mbuffer a rrel rel)
(b1:mbuffer a rrel subrel)
: Lemma
(requires rrel_rel_always_compatible rrel subrel)
(ensures
(region_lifetime_buf b0 /\
(exists i len. U32.v i + U32.v len <= length b0 /\ b1 == mgsub subrel b0 i len)) ==> region_lifetime_buf b1)
val recallable_null (#a:Type0) (#rrel #rel:srel a)
:Lemma (recallable (mnull #a #rrel #rel)) [SMTPat (recallable (mnull #a #rrel #rel))]
(*
val recallable_includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (recallable larger <==> recallable smaller))
[SMTPatOr [
[SMTPat (recallable larger); SMTPat (recallable smaller);];
[SMTPat (larger `includes` smaller)];
]]
*)
val recallable_mgsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel /\ recallable b))
(ensures (recallable (mgsub sub_rel b i len)))
[SMTPatOr [
[SMTPat (recallable (mgsub sub_rel b i len))];
[SMTPat (recallable b); SMTPat (mgsub sub_rel b i len);]
]]
val recall (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack unit (requires (fun m -> recallable b \/ (region_lifetime_buf b /\ HS.live_region m (frameOf b))))
(ensures (fun m0 _ m1 -> m0 == m1 /\ live m1 b))
(*
 * Begin: API for general witness and recall.
 * Clients can witness predicates on the contents of the buffer, and later recall them,
 * provided the predicates are stable w.r.t. the buffer preorder.
*)
(* Shorthand for predicates of Seq.seq a *)
unfold let spred (a:Type0) = Seq.seq a -> Type0
(*
 * Note the tight patterns on the quantifiers; you may need to write additional triggers
 * if you are working with them directly.
*)
unfold let stable_on (#a:Type0) (p:spred a) (rel:srel a) =
forall (s1 s2:Seq.seq a).{:pattern (p s1); (rel s1 s2); (p s2)} (p s1 /\ rel s1 s2) ==> p s2
(* Clients get this pure token when they witness a predicate *)
val witnessed (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a) :Type0
(*
 * We can only support witness and recall for gc-malloced buffers (i.e. recallable ones).
 * This is not a fundamental limitation, but lifting it needs some tweaks to the underlying state model.
*)
val witness_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.ST unit (requires (fun h0 -> p (as_seq h0 b) /\ p `stable_on` rel))
(ensures (fun h0 _ h1 -> h0 == h1 /\ b `witnessed` p))
val recall_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.ST unit (requires (fun h0 -> (recallable b \/ live h0 b) /\ b `witnessed` p))
(ensures (fun h0 _ h1 -> h0 == h1 /\ live h0 b /\ p (as_seq h0 b)))
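(* A hedged sketch of the witness/recall pattern: for any predicate p on the
   buffer's contents that is stable under rel,

     witness_p b p;   // requires p (as_seq h b) and p `stable_on` rel
     ...              // arbitrary well-typed code; b's contents evolve along rel
     recall_p b p     // requires b recallable or live; yields p (as_seq h b) again
*)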
val witnessed_functorial (#a:Type0)
(#rrel #rel1 #rel2:srel a)
(b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t)
(s1 s2:spred a)
: Lemma
(requires
rrel_rel_always_compatible rrel rel1 /\ //e.g. trivial_preorder, immutable preorder etc.
U32.v i + U32.v len <= length b1 /\
b2 == mgsub rel2 b1 i len /\ //the underlying allocation unit for b1 and b2 must be the same
witnessed b1 s1 /\
(forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2)))
(ensures witnessed b2 s2)
(*
* A stateful version that relaxes the rrel and rel compatibility
* but requires liveness of b1
*)
val witnessed_functorial_st (#a:Type0)
(#rrel #rel1 #rel2:srel a)
(b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t)
(s1 s2:spred a)
: HST.Stack unit
(requires fun h ->
live h b1 /\
U32.v i + U32.v len <= length b1 /\
b2 == mgsub rel2 b1 i len /\
witnessed b1 s1 /\
(forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2)))
(ensures fun h0 _ h1 -> h0 == h1 /\ witnessed b2 s2)
(* End: API for general witness and recall *)
/// Deallocation. A buffer that was allocated by ``malloc`` (see below)
/// can be ``free`` d.
val freeable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val free (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.ST unit (requires (fun h0 -> live h0 b /\ freeable b))
(ensures (fun h0 _ h1 -> (not (g_is_null b)) /\
Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\
(HS.get_tip h1) == (HS.get_tip h0) /\
modifies (loc_addr_of_buffer b) h0 h1 /\
HS.live_region h1 (frameOf b)))
val freeable_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (freeable b)) (ensures (length b > 0))
[SMTPat (freeable b)]
val freeable_disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (frameOf b1 <> frameOf b2 \/ as_addr b1 <> as_addr b2))
let freeable_disjoint' (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (loc_disjoint (loc_addr_of_buffer b1) (loc_addr_of_buffer b2)))
[SMTPat (freeable b1); SMTPat (disjoint b1 b2)]
= freeable_disjoint b1 b2
(***** Begin allocation functions *****)
/// Allocation. This is the common postcondition of all allocation
/// operators, which states that the resulting buffer is fresh, and
/// specifies its initial contents.
(*
 * Allocation functions:
 * In the return type, we try to give heap-independent postconditions (such as length)
 * in the refinement of the buffer type (for the usage pattern of top-level buffers),
 * while heap-dependent postconditions are provided in the ensures clause.
 *
 * One unsatisfying aspect is that these functions are duplicated in the wrappers that we write
 * (e.g. Buffer, ImmutableBuffer, etc.).
 * If we don't duplicate, then clients may face type inference issues (for preorders).
 *
 * So, if you change any of the pre- or postconditions, you should change the pre- and post-spec
 * functions (such as alloc_post_mem_common etc.), rather than the specs directly.
 * Perhaps we can rely on F* type inference and not write specs explicitly in those wrappers?
 * Will try that.
 *
 * For the memory-dependent postcondition, alloc_post_mem_common is the one used by everyone.
 *
 * For heap allocations, the library also provides partial functions that could return null.
 * Clients need to explicitly check for non-null values when using these functions.
 * Partial function specs use alloc_partial_post_mem_common.
 *
 * NOTE: a useful test for the implementation of partial functions is that
 * their spec should be valid even when their implementation just returns null.
 *)
unfold let lmbuffer (a:Type0) (rrel rel:srel a) (len:nat)
= b:mbuffer a rrel rel{length b == len /\ not (g_is_null b)} | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
b: LowStar.Monotonic.Buffer.mbuffer a rrel rel ->
h0: FStar.Monotonic.HyperStack.mem ->
h1: FStar.Monotonic.HyperStack.mem ->
s: FStar.Seq.Base.seq a
-> Prims.logical | Prims.Tot | [
"total"
] | [] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"FStar.Monotonic.HyperStack.mem",
"FStar.Seq.Base.seq",
"Prims.l_and",
"LowStar.Monotonic.Buffer.live",
"LowStar.Monotonic.Buffer.unused_in",
"FStar.Set.equal",
"FStar.Monotonic.HyperHeap.rid",
"FStar.Map.domain",
"FStar.Monotonic.Heap.heap",
"FStar.Monotonic.HyperStack.get_hmap",
"Prims.eq2",
"FStar.Monotonic.HyperStack.get_tip",
"LowStar.Monotonic.Buffer.modifies",
"LowStar.Monotonic.Buffer.loc_none",
"LowStar.Monotonic.Buffer.as_seq",
"Prims.logical"
] | [] | false | false | false | false | true | let alloc_post_mem_common
(#a: Type0)
(#rrel #rel: srel a)
(b: mbuffer a rrel rel)
(h0 h1: HS.mem)
(s: Seq.seq a)
=
| live h1 b /\ unused_in b h0 /\
(Map.domain (HS.get_hmap h1)) `Set.equal` (Map.domain (HS.get_hmap h0)) /\
(HS.get_tip h1) == (HS.get_tip h0) /\ modifies loc_none h0 h1 /\ as_seq h1 b == s | false |
|
LowStar.Monotonic.Buffer.fsti | LowStar.Monotonic.Buffer.modifies_liveness_insensitive_mreference_weak | val modifies_liveness_insensitive_mreference_weak
(l: loc)
(h h': HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires
(modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ h `HS.contains` x)
)
(ensures (h' `HS.contains` x))
[
SMTPatOr
[
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h')];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h')]
]
] | val modifies_liveness_insensitive_mreference_weak
(l: loc)
(h h': HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires
(modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ h `HS.contains` x)
)
(ensures (h' `HS.contains` x))
[
SMTPatOr
[
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h')];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h')]
]
] | let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 62,
"end_line": 1194,
"start_col": 0,
"start_line": 1180
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
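(* A hedged sketch: `get` keeps element-wise specifications readable, e.g. a
   copy postcondition (buffer names illustrative):

     (ensures fun h0 _ h1 ->
        forall (i:nat). i < length dst ==> get h1 dst i == get h0 src i)
*)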
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining the sub-buffer related API, we need to define the notion of "compatibility".
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers.
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible.
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
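(* A hedged spec-level sketch: carving the middle two elements out of a
   four-element buffer `b`, with a compatible sub-preorder `sub_rel`:

     let middle = mgsub sub_rel b 1ul 2ul
     // length middle == 2, and by `as_seq_gsub` below,
     // as_seq h middle == Seq.slice (as_seq h b) 1 3
*)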
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers with the same region and address have
/// elements of the same type (and the same ``rrel`` preorder).
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
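(* A hedged sketch: `loc_union` combines the footprints of several objects in
   a single `modifies` clause (`loc_buffer` and `modifies` are introduced
   below), e.g. for a function writing to two buffers b1 and b2:

     (ensures fun h0 _ h1 ->
        modifies (loc_union (loc_buffer b1) (loc_buffer b2)) h0 h1)

   Any location disjoint from both b1 and b2 is then preserved from h0 to h1. *)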
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then the set of memory
/// locations of ``b1`` includes that of ``b2``.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then the corresponding set of memory locations of
/// ``s1`` includes that of ``s2``.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then the corresponding set of memory locations of ``s1`` includes that of ``s2``.
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
* Same as all_disjoint, retaining for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
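(* A sketch of how these helpers typically appear in a precondition, assuming
   three buffers `b1`, `b2`, `b3` (possibly of different types):
     requires fun h ->
       all_live h [buf b1; buf b2; buf b3] /\
       loc_pairwise_disjoint [loc_buffer b1; loc_buffer b2; loc_buffer b3]
   Both predicates reduce at typechecking time to the expected conjunctions
   of `live` and `loc_disjoint` facts. *)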
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
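(* A typical postcondition built from `modifies` (a sketch, for a function
   that writes only to a buffer `b`):
     ensures fun h0 _ h1 -> modifies (loc_buffer b) h0 h1 /\ live h1 b
   Every location disjoint from `loc_buffer b` is then preserved between
   `h0` and `h1`, by the elimination lemmas that follow. *)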
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
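(* Framing sketch: assuming `live h b'`, `loc_disjoint (loc_buffer b') p` and
   `modifies p h h'`, the lemma above fires on its patterns and yields
     assert (live h' b' /\ as_seq h' b' == as_seq h b')
   so facts about the contents of `b'` survive the modification. *)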
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
/// If the memory state does not change, then ``modifies s h h`` holds for any
/// set ``s`` of memory locations (and, in particular, for the empty set ``loc_none``).
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
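(* Weakening sketch: from `modifies (loc_buffer b) h h'` one may always
   conclude `modifies (loc_union (loc_buffer b) l) h h'` for any `l`, since
   the larger set includes the smaller one (`loc_includes_union_l`). *)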
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]] | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
l: LowStar.Monotonic.Buffer.loc ->
h: FStar.Monotonic.HyperStack.mem ->
h': FStar.Monotonic.HyperStack.mem ->
x: FStar.Monotonic.HyperStack.mreference t pre
-> FStar.Pervasives.Lemma
(requires
LowStar.Monotonic.Buffer.modifies l h h' /\
LowStar.Monotonic.Buffer.loc_includes LowStar.Monotonic.Buffer.address_liveness_insensitive_locs
l /\ FStar.Monotonic.HyperStack.contains h x)
(ensures FStar.Monotonic.HyperStack.contains h' x)
[
SMTPatOr [
[
SMTPat (FStar.Monotonic.HyperStack.contains h x);
SMTPat (LowStar.Monotonic.Buffer.modifies l h h')
];
[
SMTPat (FStar.Monotonic.HyperStack.contains h' x);
SMTPat (LowStar.Monotonic.Buffer.modifies l h h')
]
]
] | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowStar.Monotonic.Buffer.loc",
"FStar.Monotonic.HyperStack.mem",
"FStar.Preorder.preorder",
"FStar.Monotonic.HyperStack.mreference",
"LowStar.Monotonic.Buffer.modifies_liveness_insensitive_mreference",
"LowStar.Monotonic.Buffer.loc_none",
"Prims.unit",
"Prims.l_and",
"LowStar.Monotonic.Buffer.modifies",
"LowStar.Monotonic.Buffer.loc_includes",
"LowStar.Monotonic.Buffer.address_liveness_insensitive_locs",
"FStar.Monotonic.HyperStack.contains",
"Prims.squash",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat_or",
"Prims.list",
"FStar.Pervasives.smt_pat",
"Prims.logical",
"Prims.Nil"
] | [] | true | false | true | false | false | let modifies_liveness_insensitive_mreference_weak
(l: loc)
(h h': HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires
(modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ h `HS.contains` x)
)
(ensures (h' `HS.contains` x))
[
SMTPatOr
[
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h')];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h')]
]
] =
| modifies_liveness_insensitive_mreference loc_none l h h' x | false |
LowStar.Monotonic.Buffer.fsti | LowStar.Monotonic.Buffer.modifies_liveness_insensitive_region_mreference_weak | val modifies_liveness_insensitive_region_mreference_weak
(l2: loc)
(h h': HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires
(modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[
SMTPatOr
[
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))]
]
] | val modifies_liveness_insensitive_region_mreference_weak
(l2: loc)
(h h': HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires
(modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[
SMTPatOr
[
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))]
]
] | let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 70,
"end_line": 1274,
"start_col": 0,
"start_line": 1260
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
 * Most of the time, they will use wrappers such as buffer, immutable buffer, etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
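(* Illustrative instantiations (sketches of the wrappers mentioned above):
     let trivial_preorder (a:Type0) : srel a = fun _ _ -> True
     let buffer (a:Type0) = mbuffer a (trivial_preorder a) (trivial_preorder a)
   gives freely mutable buffers, while a preorder such as
     fun s1 s2 -> s1 == s2
   yields buffers whose contents cannot change after allocation. *)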
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
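(* Since `len` and `length` are ghost, executable code cannot query a buffer's
   length; the usual idiom (a sketch) is to pass it as an extra argument:
     val f (#a:Type0) (#rrel #rel:srel a)
       (b:mbuffer a rrel rel) (l:U32.t{U32.v l == length b}) : ...
 *)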
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
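(* Functional-correctness sketch, assuming a buffer `b` of machine integers
   with `0 < length b`: a postcondition can pin down contents via `as_seq`
   and `get`, e.g.
     ensures fun h0 _ h1 -> get h1 b 0 == 42ul /\
                            as_seq h1 b == Seq.upd (as_seq h0 b) 0 42ul
 *)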
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` return the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further the clients can attach a preorder with the subbuffer (sub_rel),
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
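(* Sub-buffer sketch: assuming `b : mbuffer a rrel rel` with `length b == 8`,
   `mgsub rel b 2ul 4ul` denotes the sub-buffer covering indices [2, 6) of
   `b`; by the lemmas below it is live exactly when `b` is, has length 4,
   and shares `b`'s region and address. *)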
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
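(* e.g., assuming `length b >= 8`,
     mgsub rel (mgsub rel b 2ul 6ul) 1ul 3ul == mgsub rel b 3ul 3ul
   since the offsets add up and the inner bounds are checked against the
   outer sub-buffer. *)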
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers having the same region and address have
/// their elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
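(* The monoid in practice (a sketch): the footprint of a function touching
   two buffers `b1` and `b2` (with `loc_buffer` defined just below) is
   written `loc_union (loc_buffer b1) (loc_buffer b2)`; the laws above let
   the SMT solver reassociate, commute and drop `loc_none` when matching
   such footprints. *)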
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
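(* Contrast (a sketch): `loc_region_only false r` covers region `r` only,
   whereas `loc_all_regions_from false r` additionally covers every region
   that transitively extends `r` (e.g. nested stack frames), so it is the
   coarser of the two footprints. *)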
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
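(* Usage sketch: if a caller declares the footprint
   `loc_union (loc_buffer b1) (loc_buffer b2)` and a callee only reports
   `modifies (loc_buffer b1)`, then `loc_includes` (together with
   `modifies_loc_includes` below) lets the caller restate the callee's
   effect against its own, larger footprint. *)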
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then so are their
/// corresponding sets of memory locations.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then so are their corresponding sets of memory
/// locations.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then so are their corresponding memory locations
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
* Same as all_disjoint, retaining for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
/// If the memory state does not change, then ``modifies s h h`` holds for any
/// set ``s`` of memory locations (and, in particular, for the empty set ``loc_none``).
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
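(* Sketch (hypothetical buffers `b`, `x` and heaps `h0`, `h1`): an update to
   `b` yields `modifies (loc_buffer b) h0 h1`.  Since `loc_buffer b` is
   included in `address_liveness_insensitive_locs`, the liveness of *any*
   buffer is preserved, even one overlapping with `b`:

     assert (modifies (loc_buffer b) h0 h1);
     assert (live h0 x);        // no disjointness hypothesis needed
     assert (live h1 x)         // by the weak lemma just above

   Liveness can only be broken by footprints that are not liveness-insensitive,
   e.g. `loc_addresses false ...`, as used for deallocation. *)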
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
l2: LowStar.Monotonic.Buffer.loc ->
h: FStar.Monotonic.HyperStack.mem ->
h': FStar.Monotonic.HyperStack.mem ->
x: FStar.Monotonic.HyperStack.mreference t pre
-> FStar.Pervasives.Lemma
(requires
LowStar.Monotonic.Buffer.modifies l2 h h' /\
LowStar.Monotonic.Buffer.loc_includes LowStar.Monotonic.Buffer.region_liveness_insensitive_locs
l2 /\ FStar.Monotonic.HyperStack.live_region h (FStar.Monotonic.HyperStack.frameOf x))
(ensures FStar.Monotonic.HyperStack.live_region h' (FStar.Monotonic.HyperStack.frameOf x))
[
SMTPatOr [
[
SMTPat (LowStar.Monotonic.Buffer.modifies l2 h h');
SMTPat (FStar.Monotonic.HyperStack.live_region h
(FStar.Monotonic.HyperStack.frameOf x))
];
[
SMTPat (LowStar.Monotonic.Buffer.modifies l2 h h');
SMTPat (FStar.Monotonic.HyperStack.live_region h'
(FStar.Monotonic.HyperStack.frameOf x))
]
]
] | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowStar.Monotonic.Buffer.loc",
"FStar.Monotonic.HyperStack.mem",
"FStar.Preorder.preorder",
"FStar.Monotonic.HyperStack.mreference",
"LowStar.Monotonic.Buffer.modifies_liveness_insensitive_region_mreference",
"LowStar.Monotonic.Buffer.loc_none",
"Prims.unit",
"Prims.l_and",
"LowStar.Monotonic.Buffer.modifies",
"LowStar.Monotonic.Buffer.loc_includes",
"LowStar.Monotonic.Buffer.region_liveness_insensitive_locs",
"Prims.b2t",
"FStar.Monotonic.HyperStack.live_region",
"FStar.Monotonic.HyperStack.frameOf",
"Prims.squash",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat_or",
"Prims.list",
"FStar.Pervasives.smt_pat",
"Prims.bool",
"Prims.Nil"
] | [] | true | false | true | false | false | let modifies_liveness_insensitive_region_mreference_weak
(l2: loc)
(h h': HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires
(modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[
SMTPatOr
[
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))]
]
] =
| modifies_liveness_insensitive_region_mreference loc_none l2 h h' x | false |
LowStar.Monotonic.Buffer.fsti | LowStar.Monotonic.Buffer.malloc_pre | val malloc_pre : r: FStar.Monotonic.HyperHeap.rid -> len: FStar.UInt32.t -> Prims.logical | let malloc_pre (r:HS.rid) (len:U32.t) = HST.is_eternal_region r /\ U32.v len > 0 | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 87,
"end_line": 2110,
"start_col": 7,
"start_line": 2110
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
 * Most of the time, they will use wrappers such as buffer, immutable buffer, etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
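(* Two common instantiations, informally (the actual wrappers are defined in
   LowStar.Buffer and LowStar.ImmutableBuffer, not in this module):

     // freely mutable buffers: both preorders are trivial
     //   mbuffer a (fun _ _ -> True) (fun _ _ -> True)
     // immutable buffers: the preorders (roughly) force the contents to stay equal
     //   mbuffer a (fun s1 s2 -> s1 `Seq.equal` s2) (fun s1 s2 -> s1 `Seq.equal` s2)

   The preorders constrain only how the contents may evolve; liveness and
   footprints are tracked separately, further below. *)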
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
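(* For example, a (hypothetical) stateful read of index `i` would require
   liveness in its precondition:

     val read (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (i:U32.t)
       : HST.Stack a
         (requires fun h -> live h b /\ U32.v i < length b)
         (ensures  fun h0 _ h1 -> h0 == h1)

   (`length` is introduced a bit further below.)  Ghost observations such as
   `as_seq` deliberately do not require liveness. *)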
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return an lseq and remove the length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
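(* For instance, for a (hypothetical) buffer `b` with `length b >= 1`,
   `get h b 0` is definitionally `Seq.index (as_seq h b) 0`, so the two
   spellings can be used interchangeably in specifications:

     assert (get h b 0 == Seq.index (as_seq h b) 0)
*)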
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining the sub-buffer-related API, we need to define a notion of "compatibility"
///
/// Sub-buffers can be taken at a different preorder than their parent buffers,
/// but we need to ensure that changes to the sub-buffer are compatible with the
/// preorder of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder ``sub_rel`` to the sub-buffer,
/// provided it is compatible (see ``compatible_sub`` above)
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
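(* Sketch (hypothetical buffer `b` of length 8): the ghost sub-buffer covering
   indices [2, 5) is

     mgsub rel b 2ul 3ul        // sub_rel here is just the parent's `rel`

   `mgsub` itself only requires the bounds to be in range; the `compatible_sub`
   condition matters for lemmas such as `live_gsub` below, which relate the
   sub-buffer back to its parent. *)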
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
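(* Concrete instance (hypothetical buffer `b` with `length b >= 8`): taking a
   sub-buffer of a sub-buffer collapses as expected,

     mgsub rel (mgsub rel b 1ul 4ul) 2ul 2ul == mgsub rel b 3ul 2ul

   i.e. the offsets add up (1 + 2 = 3) and the innermost length (2) is kept. *)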
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers having the same region and address have
/// their elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
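(* Putting the constructors together, a footprint mixing a buffer, a reference
   and a whole stack frame (hypothetical names) can be written as:

     loc_union (loc_buffer b)
               (loc_union (loc_mreference r)
                          (loc_all_regions_from false (HS.get_tip h)))

   `loc_union` is associative, commutative and idempotent (see the lemmas near
   the beginning of this section), so nesting and ordering are irrelevant. *)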
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
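(* For instance (hypothetical locations `l1`, `l2`), a union includes each of
   its operands:

     assert (loc_includes (loc_union l1 l2) l1);
     assert (loc_includes (loc_union l1 l2) l2)

   both discharged automatically by the `loc_includes_union_l'` pattern
   together with `loc_includes_refl`. *)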
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then the same inclusion holds
/// between their corresponding sets of memory locations.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then the same inclusion holds between their
/// corresponding sets of memory locations.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then the same inclusion holds between their corresponding sets of memory locations.
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
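(* Concrete instance (hypothetical buffer `b` with `length b == 4`): the two
   halves of `b` occupy disjoint locations,

     loc_disjoint (loc_buffer (mgsub rel b 0ul 2ul))
                  (loc_buffer (mgsub rel b 2ul 2ul))

   since the index ranges [0, 2) and [2, 4) do not overlap. *)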
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
 * Same as all_disjoint, retained for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
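(* Sketch (hypothetical buffers `b1`, `b2`, `b3`): a precondition asking for
   pairwise disjointness of three buffers can be written as

     loc_pairwise_disjoint [loc_buffer b1; loc_buffer b2; loc_buffer b3]

   which normalizes at typechecking time to the three binary `loc_disjoint`
   conjuncts. *)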
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
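(* A typical stateful signature is phrased with `modifies` in its
   postcondition (a sketch with hypothetical names; any preorder-related
   preconditions are elided):

     val clear (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
       : HST.Stack unit
         (requires fun h -> live h b)
         (ensures  fun h0 _ h1 -> modifies (loc_buffer b) h0 h1 /\ live h1 b)

   Any location disjoint from `loc_buffer b` keeps its contents and its
   liveness across such a call, by the elimination lemmas below. *)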
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
/// If the memory state does not change, then any set of memory locations
/// trivially satisfies the modifies clause (in particular, the empty set
/// ``loc_none``).
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x
/// Modifies clauses are transitive. This lemma is the most general
/// one.
val modifies_trans
(s12: loc)
(h1 h2: HS.mem)
(s23: loc)
(h3: HS.mem)
: Lemma
(requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
(ensures (modifies (loc_union s12 s23) h1 h3))
let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
= modifies_trans l h1 h2 l_goal h3
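(* Chaining sketch (hypothetical heaps `h0 h1 h2` and buffer `b`): two
   successive updates to `b` compose into a single clause,

     assert (modifies (loc_buffer b) h0 h1);
     assert (modifies (loc_buffer b) h1 h2);
     modifies_trans (loc_buffer b) h0 h1 (loc_buffer b) h2;
     assert (modifies (loc_buffer b) h0 h2)   // loc_union is idempotent

   which is essentially the step `modifies_trans_linear` performs behind its
   SMT pattern. *)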
/// Regions that are not live can be removed from sets of memory
/// locations that are modified.
val modifies_only_live_regions
(rs: Set.set HS.rid)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_regions false rs) l) h h' /\
(forall r . Set.mem r rs ==> (~ (HS.live_region h r)))
))
(ensures (modifies l h h'))
/// As a consequence, fresh regions can be removed from modifies
/// clauses.
val no_upd_fresh_region: r:HS.rid -> l:loc -> h0:HS.mem -> h1:HS.mem -> Lemma
(requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1))
(ensures (modifies l h0 h1))
[SMTPat (HS.fresh_region r h0 h1); SMTPat (modifies l h0 h1)]
val new_region_modifies (m0: HS.mem) (r0: HS.rid) (col: option int) : Lemma
(requires (HST.is_eternal_region r0 /\ HS.live_region m0 r0 /\ (None? col \/ HS.is_heap_color (Some?.v col))))
(ensures (
let (_, m1) = HS.new_eternal_region m0 r0 col in
modifies loc_none m0 m1
))
[SMTPat (HS.new_eternal_region m0 r0 col)]
/// Stack discipline: any stack frame (and all its transitively
/// extending regions) that is pushed, modified and popped can be
/// removed from a modifies clause.
/// AR: 01/29/2019: Removing the smt pattern from this lemma.
/// Clients are no longer expected to call it explicitly;
/// if you find yourself having to, please raise an issue.
val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
))
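(* This is the lemma behind the usual `push_frame`/`pop_frame` pattern
   (a sketch; the function body is hypothetical):

     let f () : HST.Stack unit (requires fun _ -> True)
                               (ensures fun h0 _ h1 -> modifies loc_none h0 h1)
       = HST.push_frame ();     // h0 -> h1, a fresh frame is pushed
         ...                    // h1 -> h2, modifies only the fresh frame
         HST.pop_frame ()       // h2 -> h3, the frame is popped

   Everything touched between push and pop lives in the fresh frame, so the
   net footprint of `f` is `loc_none`. *)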
/// Compatibility lemmas to rescue modifies clauses specified in the
/// standard F* HyperStack library.
val modifies_loc_regions_intro
(rs: Set.set HS.rid)
(h1 h2: HS.mem)
: Lemma
(requires (HS.modifies rs h1 h2))
(ensures (modifies (loc_regions true rs) h1 h2))
val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2))
/// Modifies clauses for allocating a reference: nothing is
/// modified. (In particular, a modifies clause does not track
/// memory locations that are created.)
val modifies_ralloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(i: HS.rid)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel)
(h' : HS.mem)
: Lemma
(requires (HST.ralloc_post i init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.ralloc_post i init h x h')]
val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')]
/// Modifies clause for freeing a reference: the address is modified.
val modifies_free
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel { HS.is_mm r } )
(m: HS.mem { m `HS.contains` r } )
: Lemma
(modifies (loc_freed_mreference r) m (HS.free r m))
[SMTPat (HS.free r m)]
/// Another compatibility lemma
val modifies_none_modifies
(h1 h2: HS.mem)
: Lemma
(requires (HST.modifies_none h1 h2))
(ensures (modifies loc_none h1 h2))
[SMTPat (HST.modifies_none h1 h2)]
/// Compatibility with HS.upd
val modifies_upd
(#t: Type) (#pre: Preorder.preorder t)
(r: HS.mreference t pre)
(v: t)
(h: HS.mem)
: Lemma
(requires (HS.contains h r))
(ensures (modifies (loc_mreference r) h (HS.upd h r v)))
[SMTPat (HS.upd h r v)]
/// Introduction lemma for modifying loc_buffer_from_to
val modifies_loc_buffer_from_to_intro
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
/// A memory ``h`` does not contain address ``a`` in region ``r``, denoted
/// ``does_not_contain_addr h (r, a)``, only if either region ``r`` is
/// not live, or address ``a`` is unused in region ``r``.
(* BEGIN TODO: move to FStar.Monotonic.HyperStack *)
val does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: GTot Type0
val not_live_region_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (~ (HS.live_region h (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val unused_in_does_not_contain_addr
(h: HS.mem)
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r)))
val addr_unused_in_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val free_does_not_contain_addr
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
HS.is_mm r /\
m `HS.contains` r /\
fst x == HS.frameOf r /\
snd x == HS.as_addr r
))
(ensures (
HS.free r m `does_not_contain_addr` x
))
[SMTPat (HS.free r m `does_not_contain_addr` x)]
val does_not_contain_addr_elim
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
m `does_not_contain_addr` x /\
HS.frameOf r == fst x /\
HS.as_addr r == snd x
))
(ensures (~ (m `HS.contains` r)))
(** END TODO *)
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_live_addresses
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_addresses false r a) l) h h' /\
(forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x))
))
(ensures (modifies l h h'))
(* Generic way to ensure that a buffer just allocated is disjoint from
any other object, however the latter's liveness is defined. *)
val loc_not_unused_in (h: HS.mem) : GTot loc
val loc_unused_in (h: HS.mem) : GTot loc
(* Shortcut notations with more handy names *)
let loc_in (l: loc) (h: HS.mem) =
loc_not_unused_in h `loc_includes` l
let loc_not_in (l: loc) (h: HS.mem) =
loc_unused_in h `loc_includes` l
val loc_regions_unused_in (h: HS.mem) (rs: Set.set HS.rid) : Lemma
(requires (forall r . Set.mem r rs ==> (~ (HS.live_region h r))))
(ensures (loc_unused_in h `loc_includes` loc_regions false rs))
val loc_unused_in_not_unused_in_disjoint (h: HS.mem) : Lemma
(loc_disjoint (loc_unused_in h) (loc_not_unused_in h))
val not_live_region_loc_not_unused_in_disjoint
(h0: HS.mem)
(r: HS.rid)
: Lemma
(requires (~ (HS.live_region h0 r)))
(ensures (loc_disjoint (loc_region_only false r) (loc_not_unused_in h0)))
let fresh_frame_loc_not_unused_in_disjoint
(h0 h1: HS.mem)
: Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)]
= not_live_region_loc_not_unused_in_disjoint h0 (HS.get_tip h1)
val live_loc_not_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b))
(ensures (loc_not_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (live h b)]
val unused_in_loc_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (unused_in b h))
(ensures (loc_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (unused_in b h)]
val modifies_address_liveness_insensitive_unused_in
(h h' : HS.mem)
: Lemma
(requires (modifies (address_liveness_insensitive_locs) h h'))
(ensures (loc_not_unused_in h' `loc_includes` loc_not_unused_in h /\ loc_unused_in h `loc_includes` loc_unused_in h'))
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_not_unused_in
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (modifies (loc_union (loc_unused_in h) l) h h'))
(ensures (modifies l h h'))
val mreference_live_loc_not_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (h `HS.contains` r))
(ensures (loc_not_unused_in h `loc_includes` loc_freed_mreference r /\ loc_not_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.contains h r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_freed_mreference r)];
]]
val mreference_unused_in_loc_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (loc_unused_in h `loc_includes` loc_freed_mreference r /\ loc_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.unused_in r h)];
[SMTPat (loc_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_unused_in h `loc_includes` loc_freed_mreference r)];
]]
let unused_in_not_unused_in_disjoint_2
(l1 l2 l1' l2': loc)
(h: HS.mem)
: Lemma
(requires (loc_unused_in h `loc_includes` l1 /\ loc_not_unused_in h `loc_includes` l2 /\ l1 `loc_includes` l1' /\ l2 `loc_includes` l2' ))
(ensures (loc_disjoint l1' l2' ))
[SMTPat (loc_disjoint l1' l2'); SMTPat (loc_unused_in h `loc_includes` l1); SMTPat (loc_not_unused_in h `loc_includes` l2)]
= loc_includes_trans (loc_unused_in h) l1 l1' ;
loc_includes_trans (loc_not_unused_in h) l2 l2' ;
loc_unused_in_not_unused_in_disjoint h ;
loc_disjoint_includes (loc_unused_in h) (loc_not_unused_in h) l1' l2'
val modifies_loc_unused_in
(l: loc)
(h1 h2: HS.mem)
(l' : loc)
: Lemma
(requires (
modifies l h1 h2 /\
address_liveness_insensitive_locs `loc_includes` l /\
loc_unused_in h2 `loc_includes` l'
))
(ensures (loc_unused_in h1 `loc_includes` l'))
[SMTPatOr [
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h2 `loc_includes` l')];
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h1 `loc_includes` l')];
]]
/// Shorthand: freshness
let fresh_loc (l: loc) (h h' : HS.mem) : GTot Type0 =
loc_unused_in h `loc_includes` l /\
loc_not_unused_in h' `loc_includes` l
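(*
 * Example (illustrative sketch only, not re-checked here): freshness is what
 * makes a newly allocated location automatically disjoint from everything
 * that was already allocated, via `unused_in_not_unused_in_disjoint_2` above.
 *
 *   let fresh_disjoint (#a:Type0) (#rrel #rel:srel a)
 *     (b b':mbuffer a rrel rel) (h0 h1:HS.mem)
 *     : Lemma (requires fresh_loc (loc_buffer b) h0 h1 /\ live h0 b')
 *             (ensures  loc_disjoint (loc_buffer b) (loc_addr_of_buffer b'))
 *   = unused_in_not_unused_in_disjoint_2
 *       (loc_buffer b) (loc_addr_of_buffer b')
 *       (loc_buffer b) (loc_addr_of_buffer b') h0
 *)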
let ralloc_post_fresh_loc (#a:Type) (#rel:Preorder.preorder a) (i: HS.rid) (init:a) (m0: HS.mem)
(x: HST.mreference a rel{HST.is_eternal_region (HS.frameOf x)}) (m1: HS.mem) : Lemma
(requires (HST.ralloc_post i init m0 x m1))
(ensures (fresh_loc (loc_freed_mreference x) m0 m1))
[SMTPat (HST.ralloc_post i init m0 x m1)]
= ()
//AR: this is needed for liveness across fresh_frame
val fresh_frame_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (modifies loc_none h0 h1))
[SMTPat (HS.fresh_frame h0 h1)]
val popped_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.popped h0 h1))
(ensures (modifies (loc_region_only false (HS.get_tip h0)) h0 h1))
[SMTPat (HS.popped h0 h1)]
val modifies_remove_new_locs (l_fresh l_aux l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (fresh_loc l_fresh h1 h2 /\
modifies l_aux h1 h2 /\
l_goal `loc_includes` l_aux /\
modifies (loc_union l_fresh l_goal) h2 h3))
(ensures (modifies l_goal h1 h3))
[SMTPat (fresh_loc l_fresh h1 h2);
SMTPat (modifies l_aux h1 h2);
SMTPat (modifies l_goal h1 h3)]
(*
 * AR: this lemma frames the modifies clause across a fresh frame.
 * One way to do it would have been to reuse the lemma modifies_remove_new_locs,
 * treating the fresh frame as another new location.
 * However, the way the library is set up, loc_region in any form cannot be considered
 * a fresh loc,
 * so we have a special lemma for fresh_frame.
*)
let modifies_remove_fresh_frame (h1 h2 h3:HS.mem) (l:loc)
: Lemma (requires (HS.fresh_frame h1 h2 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h2)) l) h2 h3))
(ensures (modifies l h1 h3))
[SMTPat (modifies l h1 h3); SMTPat (HS.fresh_frame h1 h2)]
= loc_regions_unused_in h1 (HS.mod_set (Set.singleton (HS.get_tip h2)));
modifies_only_not_unused_in l h1 h3
/// Legacy shorthands for disjointness and inclusion of buffers
///
let disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_disjoint (loc_buffer b1) (loc_buffer b2)
let includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_includes (loc_buffer b1) (loc_buffer b2) /\
(g_is_null b1 <==> g_is_null b2)
val disjoint_neq (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (disjoint b1 b2 /\ U32.v (len b1) > 0))
(ensures (~(b1 === b2)))
val empty_disjoint (#t1 #t2: Type) (#rrel1 #rel1: srel t1) (#rrel2 #rel2: srel t2) (b1: mbuffer t1 rrel1 rel1) (b2: mbuffer t2 rrel2 rel2) : Lemma
(requires (length b1 == 0))
(ensures (disjoint b1 b2))
(*
/// The liveness of a sub-buffer follows from the liveness
/// of its enclosing buffer.
val includes_live (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (live h larger ==> live h smaller))
[SMTPatOr [
[SMTPat (includes larger smaller); SMTPat (live h larger)];
[SMTPat (includes larger smaller); SMTPat (live h smaller)];
]]
*)
val includes_frameOf_as_addr (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (g_is_null larger == g_is_null smaller /\ frameOf larger == frameOf smaller /\ as_addr larger == as_addr smaller))
[SMTPat (larger `includes` smaller)]
///
/// Useful shorthands for pointers, or maybe-null pointers
inline_for_extraction
type mpointer (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{length b == 1}
inline_for_extraction
type mpointer_or_null (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{if g_is_null b then True else length b == 1}
unfold
let deref (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (x:mpointer a rrel rel) =
get h x 0
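(*
 * Example (illustrative sketch only, not re-checked here): a pointer is just
 * a length-1 buffer, so it is read with ``index p 0ul`` and written with
 * ``upd p 0ul v`` (both declared further below); ``deref`` is the spec-level
 * counterpart of the read.  `triv = fun _ _ -> True` is an assumed trivial
 * preorder.
 *
 *   let clear (p:mpointer U32.t triv triv)
 *     : HST.Stack unit
 *       (requires fun h -> live h p)
 *       (ensures  fun h0 _ h1 -> modifies (loc_buffer p) h0 h1 /\ deref h1 p == 0ul)
 *   = upd p 0ul 0ul
 *)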
/// Two pointers having different contents are disjoint. This is valid
/// only for pointers, i.e. buffers of size 1.
val pointer_distinct_sel_disjoint
(#a:Type0) (#rrel1 #rrel2 #rel1 #rel2:srel a)
(b1:mpointer a rrel1 rel1)
(b2:mpointer a rrel2 rel2)
(h:HS.mem)
:Lemma (requires (live h b1 /\ live h b2 /\ get h b1 0 =!= get h b2 0))
(ensures (disjoint b1 b2))
/// The following stateful operations on buffers do not change the
/// memory, but, as required by the C standard, they all require the
/// buffer in question to be live.
/// The nullity test, ``is_null b``, which KaRaMeL compiles to C as ``b == NULL``.
val is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack bool (requires (fun h -> live h b))
(ensures (fun h y h' -> h == h' /\ y == g_is_null b))
/// ``sub b i len`` constructs the sub-buffer of ``b`` starting from
/// offset ``i`` with length ``len``. KaRaMeL extracts this operation as
/// ``b + i`` (or, equivalently, ``&b[i]``.)
val msub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t) (len:Ghost.erased U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i + U32.v (Ghost.reveal len) <= length b /\ compatible_sub b i (Ghost.reveal len) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (Ghost.reveal len)))
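(*
 * Example (illustrative sketch only, not re-checked here): with the trivial
 * preorder `triv = fun _ _ -> True`, `compatible_sub` holds for any slice,
 * so a prefix of a live buffer can be carved out as follows.
 *
 *   let prefix4 (b:mbuffer U32.t triv triv{4 <= length b})
 *     : HST.Stack (mbuffer U32.t triv triv)
 *       (requires fun h -> live h b)
 *       (ensures  fun h0 b' h1 -> h0 == h1 /\ b' == mgsub triv b 0ul 4ul)
 *   = msub triv b 0ul (Ghost.hide 4ul)
 *)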
/// ``offset b i`` constructs the tail of the buffer ``b`` starting from
/// offset ``i``, i.e. the sub-buffer of ``b`` starting from offset ``i``
/// with length ``U32.sub (len b) i``. KaRaMeL compiles it as ``b + i`` or
/// ``&b[i]``.
///
/// This stateful operation cannot be derived from ``sub``, because the
/// length cannot be computed outside of proofs.
val moffset (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i <= length b /\ compatible_sub b i (U32.sub (len b) i) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (U32.sub (len b) i)))
// goffset
/// ``index b i`` reads the value of ``b`` at offset ``i`` from memory and
/// returns it. KaRaMeL compiles it as b[i].
val index (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (i:U32.t)
:HST.Stack a (requires (fun h -> live h b /\ U32.v i < length b))
(ensures (fun h y h' -> h == h' /\ y == Seq.index (as_seq h b) (U32.v i)))
/// The following stateful operations on buffers modify the memory,
/// and, as usual, require the liveness of the buffer.
/// ``g_upd_seq b s h`` updates the entire buffer `b`'s contents in
/// heap `h` to correspond to the sequence `s`
val g_upd_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (s:Seq.lseq a (length b))
(h:HS.mem{live h b})
:GTot HS.mem
val lemma_g_upd_with_same_seq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b)) (ensures (g_upd_seq b (as_seq h b) h == h))
/// A lemma specifying `g_upd_seq` in terms of its effect on the
/// buffer's underlying sequence
val g_upd_seq_as_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(s:Seq.lseq a (length b))
(h:HS.mem{live h b})
: Lemma (let h' = g_upd_seq b s h in
(Seq.length s > 0 ==> not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
HST.equal_domains h h' /\
as_seq h' b == s)
/// ``g_upd b i v h`` updates the buffer `b` in heap `h` at location
/// `i` writing ``v`` there. This is the spec analog of the stateful
/// update `upd` below.
let g_upd (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: GTot HS.mem
= g_upd_seq b (Seq.upd (as_seq h b) i v) h
val g_upd_modifies_strong (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: Lemma (modifies (loc_buffer_from_to b (U32.uint_to_t i) (U32.uint_to_t (i + 1))) h (g_upd b i v h))
/// ``upd b i v`` writes ``v`` to the memory, at offset ``i`` of
/// buffer ``b``. KaRaMeL compiles it as ``b[i] = v``.
val upd'
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
:HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> h' == g_upd b (U32.v i) v h))
inline_for_extraction
let upd
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
: HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> (not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
as_seq h' b == Seq.upd (as_seq h b) (U32.v i) v))
= let h = HST.get () in
upd' b i v;
g_upd_seq_as_seq b (Seq.upd (as_seq h b) (U32.v i) v) h
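(*
 * Example (illustrative sketch only, not re-checked here): a read-modify-write
 * through ``index`` and ``upd``.  With the assumed trivial preorder
 * `triv = fun _ _ -> True`, the `rel (as_seq h b) (Seq.upd ...)` precondition
 * of ``upd`` is immediate.
 *
 *   let bump_head (b:mbuffer U32.t triv triv{0 < length b})
 *     : HST.Stack unit
 *       (requires fun h -> live h b /\ U32.v (get h b 0) < 100)
 *       (ensures  fun h0 _ h1 -> modifies (loc_buffer b) h0 h1 /\
 *                              get h1 b 0 == U32.add (get h0 b 0) 1ul)
 *   = let x = index b 0ul in
 *     upd b 0ul (U32.add x 1ul)
 *)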
(* FIXME: Comment on `recall` *)
val recallable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val region_lifetime_buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Type0
(*
* A functoriality lemma
*)
unfold
let rrel_rel_always_compatible (#a:Type0) (rrel rel:srel a) =
forall (len:nat) (i:nat) (j:nat{i <= j /\ j <= len}). compatible_subseq_preorder len rrel i j rel
val region_lifetime_sub (#a:Type0) (#rrel #rel #subrel:srel a)
(b0:mbuffer a rrel rel)
(b1:mbuffer a rrel subrel)
: Lemma
(requires rrel_rel_always_compatible rrel subrel)
(ensures
(region_lifetime_buf b0 /\
(exists i len. U32.v i + U32.v len <= length b0 /\ b1 == mgsub subrel b0 i len)) ==> region_lifetime_buf b1)
val recallable_null (#a:Type0) (#rrel #rel:srel a)
:Lemma (recallable (mnull #a #rrel #rel)) [SMTPat (recallable (mnull #a #rrel #rel))]
(*
val recallable_includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (recallable larger <==> recallable smaller))
[SMTPatOr [
[SMTPat (recallable larger); SMTPat (recallable smaller);];
[SMTPat (larger `includes` smaller)];
]]
*)
val recallable_mgsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel /\ recallable b))
(ensures (recallable (mgsub sub_rel b i len)))
[SMTPatOr [
[SMTPat (recallable (mgsub sub_rel b i len))];
[SMTPat (recallable b); SMTPat (mgsub sub_rel b i len);]
]]
val recall (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack unit (requires (fun m -> recallable b \/ (region_lifetime_buf b /\ HS.live_region m (frameOf b))))
(ensures (fun m0 _ m1 -> m0 == m1 /\ live m1 b))
(*
* Begin: API for general witness and recall
* Clients can witness predicates on the contents of the buffer, and later recall them
* Provided the predicates are stable w.r.t. the buffer preorder
*)
(* Shorthand for predicates of Seq.seq a *)
unfold let spred (a:Type0) = Seq.seq a -> Type0
(*
 * Note the tight patterns on the quantifier; you may need to write additional triggers
 * if you are working with them directly
*)
unfold let stable_on (#a:Type0) (p:spred a) (rel:srel a) =
forall (s1 s2:Seq.seq a).{:pattern (p s1); (rel s1 s2); (p s2)} (p s1 /\ rel s1 s2) ==> p s2
(* Clients get this pure token when they witness a predicate *)
val witnessed (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a) :Type0
(*
* We can only support witness and recall for gc-malloced buffers (i.e. recallable ones)
* This is not a fundamental limitation, but needs some tweaks to the underlying state model
*)
val witness_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.ST unit (requires (fun h0 -> p (as_seq h0 b) /\ p `stable_on` rel))
(ensures (fun h0 _ h1 -> h0 == h1 /\ b `witnessed` p))
val recall_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.ST unit (requires (fun h0 -> (recallable b \/ live h0 b) /\ b `witnessed` p))
(ensures (fun h0 _ h1 -> h0 == h1 /\ live h0 b /\ p (as_seq h0 b)))
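(*
 * Example (illustrative sketch only, not re-checked here): with a constant
 * preorder every sequence predicate is trivially stable, so a property of
 * the contents can be witnessed once and recalled later, even from a
 * context that no longer tracks the buffer's liveness.  `const_rel` and
 * `zeroed` are names made up for this example.
 *
 *   let const_rel : srel U32.t = fun s1 s2 -> s1 == s2
 *   let zeroed : spred U32.t =
 *     fun s -> forall (i:nat). i < Seq.length s ==> Seq.index s i == 0ul
 *
 *   (* after establishing `zeroed (as_seq h b)` for a recallable
 *      b:mbuffer U32.t const_rel const_rel: *)
 *   witness_p b zeroed;
 *   ...
 *   recall_p b zeroed   (* recovers `zeroed (as_seq h b)` in the current state *)
 *)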
val witnessed_functorial (#a:Type0)
(#rrel #rel1 #rel2:srel a)
(b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t)
(s1 s2:spred a)
: Lemma
(requires
rrel_rel_always_compatible rrel rel1 /\ //e.g. trivial_preorder, immutable preorder etc.
U32.v i + U32.v len <= length b1 /\
b2 == mgsub rel2 b1 i len /\ //the underlying allocation unit for b1 and b2 must be the same
witnessed b1 s1 /\
(forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2)))
(ensures witnessed b2 s2)
(*
* A stateful version that relaxes the rrel and rel compatibility
* but requires liveness of b1
*)
val witnessed_functorial_st (#a:Type0)
(#rrel #rel1 #rel2:srel a)
(b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t)
(s1 s2:spred a)
: HST.Stack unit
(requires fun h ->
live h b1 /\
U32.v i + U32.v len <= length b1 /\
b2 == mgsub rel2 b1 i len /\
witnessed b1 s1 /\
(forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2)))
(ensures fun h0 _ h1 -> h0 == h1 /\ witnessed b2 s2)
(* End: API for general witness and recall *)
/// Deallocation. A buffer that was allocated by ``malloc`` (see below)
/// can be ``free`` d.
val freeable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val free (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.ST unit (requires (fun h0 -> live h0 b /\ freeable b))
(ensures (fun h0 _ h1 -> (not (g_is_null b)) /\
Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\
(HS.get_tip h1) == (HS.get_tip h0) /\
modifies (loc_addr_of_buffer b) h0 h1 /\
HS.live_region h1 (frameOf b)))
val freeable_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (freeable b)) (ensures (length b > 0))
[SMTPat (freeable b)]
val freeable_disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (frameOf b1 <> frameOf b2 \/ as_addr b1 <> as_addr b2))
let freeable_disjoint' (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (loc_disjoint (loc_addr_of_buffer b1) (loc_addr_of_buffer b2)))
[SMTPat (freeable b1); SMTPat (disjoint b1 b2)]
= freeable_disjoint b1 b2
(***** Begin allocation functions *****)
/// Allocation. This is the common postcondition of all allocation
/// operators, which tells that the resulting buffer is fresh, and
/// specifies its initial contents.
(*
* Allocation functions:
* In the return type, we try to give heap-independent postconditions (such as length)
* in the refinement of the buffer type (for the usage pattern of top-level buffers)
 * while heap-dependent postconditions are provided in the ensures clause
 *
 * One unsatisfying aspect is that these functions are duplicated in the wrappers that we write
 * (e.g. Buffer, ImmutableBuffer, etc.)
 * If we don't duplicate them, clients may face type inference issues (for preorders)
 *
 * So, if you change any of the pre- or postconditions, you should change the pre- and post-spec functions
 * (such as alloc_post_mem_common etc.), rather than the specs directly
* Perhaps we can rely on F* type inference and not write specs explicitly in those wrappers?
* Will try that
*
* For memory dependent post, alloc_post_mem_common is the one used by everyone
*
* For heap allocations, the library also provides partial functions that could return null
* Clients need to explicitly check for non-null values when using these functions
* Partial function specs use alloc_partial_post_mem_common
*
* NOTE: a useful test for the implementation of partial functions is that
* their spec should be valid even when their implementation just returns null
*)
unfold let lmbuffer (a:Type0) (rrel rel:srel a) (len:nat)
= b:mbuffer a rrel rel{length b == len /\ not (g_is_null b)}
unfold
let alloc_post_mem_common (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h0 h1:HS.mem) (s:Seq.seq a)
= live h1 b /\
unused_in b h0 /\
Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\
(HS.get_tip h1) == (HS.get_tip h0) /\
modifies loc_none h0 h1 /\
as_seq h1 b == s
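(*
 * Example (illustrative sketch only, not re-checked here): since
 * `alloc_post_mem_common` is an `unfold` definition, a caller of any
 * allocation function can read the individual facts off directly, e.g.
 *
 *   let alloc_post_gives_contents (#a:Type0) (#rrel #rel:srel a)
 *     (b:mbuffer a rrel rel) (h0 h1:HS.mem) (s:Seq.seq a)
 *     : Lemma (requires alloc_post_mem_common b h0 h1 s)
 *             (ensures  live h1 b /\ modifies loc_none h0 h1 /\ as_seq h1 b == s)
 *   = ()
 *)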
(* Return type and post for partial allocation functions *)
unfold let lmbuffer_or_null (a:Type0) (rrel rel:srel a) (len:nat) (r:HS.rid)
= b:mbuffer a rrel rel{(not (g_is_null b)) ==> (length b == len /\ frameOf b == r)}
unfold let alloc_partial_post_mem_common (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h0 h1:HS.mem) (s:Seq.seq a)
= (g_is_null b /\ h0 == h1) \/
((not (g_is_null b)) /\ alloc_post_mem_common b h0 h1 s) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | r: FStar.Monotonic.HyperHeap.rid -> len: FStar.UInt32.t -> Prims.logical | Prims.Tot | [
"total"
] | [] | [
"FStar.Monotonic.HyperHeap.rid",
"FStar.UInt32.t",
"Prims.l_and",
"FStar.HyperStack.ST.is_eternal_region",
"Prims.b2t",
"Prims.op_GreaterThan",
"FStar.UInt32.v",
"Prims.logical"
] | [] | false | false | false | true | true | let malloc_pre (r: HS.rid) (len: U32.t) =
| HST.is_eternal_region r /\ U32.v len > 0 | false |
|
LowStar.Monotonic.Buffer.fsti | LowStar.Monotonic.Buffer.lmbuffer | val lmbuffer : a: Type0 ->
rrel: LowStar.Monotonic.Buffer.srel a ->
rel: LowStar.Monotonic.Buffer.srel a ->
len: Prims.nat
-> Type0 | let lmbuffer (a:Type0) (rrel rel:srel a) (len:nat)
= b:mbuffer a rrel rel{length b == len /\ not (g_is_null b)} | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 62,
"end_line": 2088,
"start_col": 7,
"start_line": 2087
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq h b``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers having the same region and address have
/// elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
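(*
 * Example (illustrative sketch only, not re-checked here): together with
 * idempotence, these laws let callers freely rearrange footprints, e.g.
 *
 *   let rearrange (s1 s2 s3: loc)
 *     : Lemma (loc_union s1 (loc_union s2 s3) == loc_union (loc_union s2 s1) s3)
 *   = loc_union_comm s2 s1;
 *     loc_union_assoc s1 s2 s3
 *)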
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then their
/// corresponding sets of memory locations are related by inclusion as well.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then their corresponding sets of memory
/// locations are related by inclusion as well.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then the corresponding sets of memory locations are related by
/// inclusion as well.
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
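(* A usage sketch (hypothetical client code, not part of this interface):
   when a footprint is a union, either operand justifies an inclusion, e.g.

     let l = loc_union (loc_buffer b1) (loc_buffer b2) in
     assert (loc_includes l (loc_buffer b1));
     assert (loc_includes l (loc_buffer b2))

   Both assertions are expected to be discharged by the SMT patterns of the
   three lemmas above, without explicit lemma calls. *)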
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
* Same as all_disjoint, retaining for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
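(* A usage sketch (hypothetical client code, not part of this interface):
   a precondition over several buffers can be stated compactly as

     all_live h [buf b1; buf b2; buf b3] /\
     all_disjoint [loc_buffer b1; loc_buffer b2; loc_buffer b3]

   and the combined footprint as

     loc_union_l [loc_buffer b1; loc_buffer b2; loc_buffer b3]

   all of which reduce at typechecking time to the expected conjunctions
   and unions. *)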
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
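(* A typical specification shape (hypothetical client signature, not part of
   this interface): a function writing only into dst would be declared as

     val fill (dst:mbuffer a rrel rel) (v:a)
       : HST.Stack unit
           (requires (fun h -> live h dst))
           (ensures  (fun h0 _ h1 -> modifies (loc_buffer dst) h0 h1))

   so that any location disjoint from loc_buffer dst keeps its liveness and
   contents across the call, by the elimination lemmas that follow. *)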
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
/// If the memory state does not change, then any set of memory locations
/// is trivially modified (in particular, the empty set ``loc_none``).
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
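(* A weakening sketch (informal, not an additional lemma of this interface):
   if a callee establishes modifies (loc_buffer b) h0 h1 and the caller
   advertises the larger footprint loc_union (loc_buffer b) l, then

     modifies (loc_union (loc_buffer b) l) h0 h1

   follows from modifies_loc_includes together with the union/inclusion
   lemmas above, usually automatically thanks to the SMT patterns. *)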
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x
/// Modifies clauses are transitive. This lemma is the most general
/// one.
val modifies_trans
(s12: loc)
(h1 h2: HS.mem)
(s23: loc)
(h3: HS.mem)
: Lemma
(requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
(ensures (modifies (loc_union s12 s23) h1 h3))
let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
= modifies_trans l h1 h2 l_goal h3
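(* A chaining sketch (informal, not an additional lemma of this interface):
   after two successive calls yielding

     modifies l1 h0 h1   and   modifies l2 h1 h2

   modifies_trans gives modifies (loc_union l1 l2) h0 h2, and
   modifies_trans_linear covers the common case where the goal clause
   already includes the clause of the first step. *)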
/// Regions that are not live can be removed from sets of memory
/// locations that are modified.
val modifies_only_live_regions
(rs: Set.set HS.rid)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_regions false rs) l) h h' /\
(forall r . Set.mem r rs ==> (~ (HS.live_region h r)))
))
(ensures (modifies l h h'))
/// As a consequence, fresh regions can be removed from modifies
/// clauses.
val no_upd_fresh_region: r:HS.rid -> l:loc -> h0:HS.mem -> h1:HS.mem -> Lemma
(requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1))
(ensures (modifies l h0 h1))
[SMTPat (HS.fresh_region r h0 h1); SMTPat (modifies l h0 h1)]
val new_region_modifies (m0: HS.mem) (r0: HS.rid) (col: option int) : Lemma
(requires (HST.is_eternal_region r0 /\ HS.live_region m0 r0 /\ (None? col \/ HS.is_heap_color (Some?.v col))))
(ensures (
let (_, m1) = HS.new_eternal_region m0 r0 col in
modifies loc_none m0 m1
))
[SMTPat (HS.new_eternal_region m0 r0 col)]
/// Stack discipline: any stack frame (and all its transitively
/// extending regions) that is pushed, modified and popped can be
/// removed from a modifies clause.
/// AR: 01/29/2019: Removing the smt pattern from this lemma.
/// Clients are no longer expected to call it explicitly;
/// if you find yourself having to, please raise an issue.
val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
))
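(* The code pattern this lemma supports (hypothetical client code, not part
   of this interface; push_frame and pop_frame are from FStar.HyperStack.ST,
   and alloca stands for a stack-allocation operator of a client-facing
   buffer library):

     push_frame ();                  (* h0 -> h1, fresh frame *)
     let tmp = alloca 0ul 8ul in     (* allocations and writes stay in the frame *)
     ...
     pop_frame ()                    (* h2 -> h3, frame popped *)

   Writes confined to the fresh frame are dropped from the final clause,
   leaving modifies s h0 h3 only for the locations s visible before the push. *)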
/// Compatibility lemmas to rescue modifies clauses specified in the
/// standard F* HyperStack library.
val modifies_loc_regions_intro
(rs: Set.set HS.rid)
(h1 h2: HS.mem)
: Lemma
(requires (HS.modifies rs h1 h2))
(ensures (modifies (loc_regions true rs) h1 h2))
val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2))
/// Modifies clauses for allocating a reference: nothing is
/// modified. (In particular, a modifies clause does not track
/// memory locations that are created.)
val modifies_ralloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(i: HS.rid)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel)
(h' : HS.mem)
: Lemma
(requires (HST.ralloc_post i init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.ralloc_post i init h x h')]
val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')]
/// Modifies clause for freeing a reference: the address is modified.
val modifies_free
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel { HS.is_mm r } )
(m: HS.mem { m `HS.contains` r } )
: Lemma
(modifies (loc_freed_mreference r) m (HS.free r m))
[SMTPat (HS.free r m)]
/// Another compatibility lemma
val modifies_none_modifies
(h1 h2: HS.mem)
: Lemma
(requires (HST.modifies_none h1 h2))
(ensures (modifies loc_none h1 h2))
[SMTPat (HST.modifies_none h1 h2)]
/// Compatibility with HS.upd
val modifies_upd
(#t: Type) (#pre: Preorder.preorder t)
(r: HS.mreference t pre)
(v: t)
(h: HS.mem)
: Lemma
(requires (HS.contains h r))
(ensures (modifies (loc_mreference r) h (HS.upd h r v)))
[SMTPat (HS.upd h r v)]
/// Introduction lemma for modifying loc_buffer_from_to
val modifies_loc_buffer_from_to_intro
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
/// A memory ``h`` does not contain address ``a`` in region ``r``, denoted
/// ``does_not_contain_addr h (r, a)``, only if either region ``r`` is
/// not live, or address ``a`` is unused in region ``r``.
(* BEGIN TODO: move to FStar.Monotonic.HyperStack *)
val does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: GTot Type0
val not_live_region_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (~ (HS.live_region h (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val unused_in_does_not_contain_addr
(h: HS.mem)
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r)))
val addr_unused_in_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val free_does_not_contain_addr
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
HS.is_mm r /\
m `HS.contains` r /\
fst x == HS.frameOf r /\
snd x == HS.as_addr r
))
(ensures (
HS.free r m `does_not_contain_addr` x
))
[SMTPat (HS.free r m `does_not_contain_addr` x)]
val does_not_contain_addr_elim
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
m `does_not_contain_addr` x /\
HS.frameOf r == fst x /\
HS.as_addr r == snd x
))
(ensures (~ (m `HS.contains` r)))
(** END TODO *)
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_live_addresses
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_addresses false r a) l) h h' /\
(forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x))
))
(ensures (modifies l h h'))
(* Generic way to ensure that a buffer just allocated is disjoint from
any other object, however the latter's liveness is defined. *)
val loc_not_unused_in (h: HS.mem) : GTot loc
val loc_unused_in (h: HS.mem) : GTot loc
(* Shortcut notations with more handy names *)
let loc_in (l: loc) (h: HS.mem) =
loc_not_unused_in h `loc_includes` l
let loc_not_in (l: loc) (h: HS.mem) =
loc_unused_in h `loc_includes` l
val loc_regions_unused_in (h: HS.mem) (rs: Set.set HS.rid) : Lemma
(requires (forall r . Set.mem r rs ==> (~ (HS.live_region h r))))
(ensures (loc_unused_in h `loc_includes` loc_regions false rs))
val loc_unused_in_not_unused_in_disjoint (h: HS.mem) : Lemma
(loc_disjoint (loc_unused_in h) (loc_not_unused_in h))
val not_live_region_loc_not_unused_in_disjoint
(h0: HS.mem)
(r: HS.rid)
: Lemma
(requires (~ (HS.live_region h0 r)))
(ensures (loc_disjoint (loc_region_only false r) (loc_not_unused_in h0)))
let fresh_frame_loc_not_unused_in_disjoint
(h0 h1: HS.mem)
: Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)]
= not_live_region_loc_not_unused_in_disjoint h0 (HS.get_tip h1)
val live_loc_not_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b))
(ensures (loc_not_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (live h b)]
val unused_in_loc_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (unused_in b h))
(ensures (loc_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (unused_in b h)]
val modifies_address_liveness_insensitive_unused_in
(h h' : HS.mem)
: Lemma
(requires (modifies (address_liveness_insensitive_locs) h h'))
(ensures (loc_not_unused_in h' `loc_includes` loc_not_unused_in h /\ loc_unused_in h `loc_includes` loc_unused_in h'))
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_not_unused_in
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (modifies (loc_union (loc_unused_in h) l) h h'))
(ensures (modifies l h h'))
val mreference_live_loc_not_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (h `HS.contains` r))
(ensures (loc_not_unused_in h `loc_includes` loc_freed_mreference r /\ loc_not_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.contains h r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_freed_mreference r)];
]]
val mreference_unused_in_loc_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (loc_unused_in h `loc_includes` loc_freed_mreference r /\ loc_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.unused_in r h)];
[SMTPat (loc_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_unused_in h `loc_includes` loc_freed_mreference r)];
]]
let unused_in_not_unused_in_disjoint_2
(l1 l2 l1' l2': loc)
(h: HS.mem)
: Lemma
(requires (loc_unused_in h `loc_includes` l1 /\ loc_not_unused_in h `loc_includes` l2 /\ l1 `loc_includes` l1' /\ l2 `loc_includes` l2' ))
(ensures (loc_disjoint l1' l2' ))
[SMTPat (loc_disjoint l1' l2'); SMTPat (loc_unused_in h `loc_includes` l1); SMTPat (loc_not_unused_in h `loc_includes` l2)]
= loc_includes_trans (loc_unused_in h) l1 l1' ;
loc_includes_trans (loc_not_unused_in h) l2 l2' ;
loc_unused_in_not_unused_in_disjoint h ;
loc_disjoint_includes (loc_unused_in h) (loc_not_unused_in h) l1' l2'
val modifies_loc_unused_in
(l: loc)
(h1 h2: HS.mem)
(l' : loc)
: Lemma
(requires (
modifies l h1 h2 /\
address_liveness_insensitive_locs `loc_includes` l /\
loc_unused_in h2 `loc_includes` l'
))
(ensures (loc_unused_in h1 `loc_includes` l'))
[SMTPatOr [
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h2 `loc_includes` l')];
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h1 `loc_includes` l')];
]]
/// Shorthand: freshness
let fresh_loc (l: loc) (h h' : HS.mem) : GTot Type0 =
loc_unused_in h `loc_includes` l /\
loc_not_unused_in h' `loc_includes` l
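(* A freshness sketch (informal, not an additional lemma of this interface):
   an allocation typically establishes

     fresh_loc (loc_buffer b) h0 h1

   and, combined with unused_in_not_unused_in_disjoint_2 above, this yields
   loc_disjoint (loc_buffer b) (loc_buffer b') for any buffer b' live in h0. *)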
let ralloc_post_fresh_loc (#a:Type) (#rel:Preorder.preorder a) (i: HS.rid) (init:a) (m0: HS.mem)
(x: HST.mreference a rel{HST.is_eternal_region (HS.frameOf x)}) (m1: HS.mem) : Lemma
(requires (HST.ralloc_post i init m0 x m1))
(ensures (fresh_loc (loc_freed_mreference x) m0 m1))
[SMTPat (HST.ralloc_post i init m0 x m1)]
= ()
//AR: this is needed for liveness across fresh_frame
val fresh_frame_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (modifies loc_none h0 h1))
[SMTPat (HS.fresh_frame h0 h1)]
val popped_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.popped h0 h1))
(ensures (modifies (loc_region_only false (HS.get_tip h0)) h0 h1))
[SMTPat (HS.popped h0 h1)]
val modifies_remove_new_locs (l_fresh l_aux l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (fresh_loc l_fresh h1 h2 /\
modifies l_aux h1 h2 /\
l_goal `loc_includes` l_aux /\
modifies (loc_union l_fresh l_goal) h2 h3))
(ensures (modifies l_goal h1 h3))
[SMTPat (fresh_loc l_fresh h1 h2);
SMTPat (modifies l_aux h1 h2);
SMTPat (modifies l_goal h1 h3)]
(*
 * AR: this lemma frames the modifies clause across a fresh frame.
 * One way to do it would have been to reuse the lemma modifies_remove_new_locs,
 * treating the fresh frame as another new location.
 * However, the way the library is set up, loc_region in any form cannot be
 * considered a fresh loc, so we have a special lemma for fresh_frame.
*)
let modifies_remove_fresh_frame (h1 h2 h3:HS.mem) (l:loc)
: Lemma (requires (HS.fresh_frame h1 h2 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h2)) l) h2 h3))
(ensures (modifies l h1 h3))
[SMTPat (modifies l h1 h3); SMTPat (HS.fresh_frame h1 h2)]
= loc_regions_unused_in h1 (HS.mod_set (Set.singleton (HS.get_tip h2)));
modifies_only_not_unused_in l h1 h3
/// Legacy shorthands for disjointness and inclusion of buffers
///
let disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_disjoint (loc_buffer b1) (loc_buffer b2)
let includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_includes (loc_buffer b1) (loc_buffer b2) /\
(g_is_null b1 <==> g_is_null b2)
val disjoint_neq (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (disjoint b1 b2 /\ U32.v (len b1) > 0))
(ensures (~(b1 === b2)))
val empty_disjoint (#t1 #t2: Type) (#rrel1 #rel1: srel t1) (#rrel2 #rel2: srel t2) (b1: mbuffer t1 rrel1 rel1) (b2: mbuffer t2 rrel2 rel2) : Lemma
(requires (length b1 == 0))
(ensures (disjoint b1 b2))
(*
/// The liveness of a sub-buffer follows from the liveness
/// of its enclosing buffer.
val includes_live (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (live h larger ==> live h smaller))
[SMTPatOr [
[SMTPat (includes larger smaller); SMTPat (live h larger)];
[SMTPat (includes larger smaller); SMTPat (live h smaller)];
]]
*)
val includes_frameOf_as_addr (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (g_is_null larger == g_is_null smaller /\ frameOf larger == frameOf smaller /\ as_addr larger == as_addr smaller))
[SMTPat (larger `includes` smaller)]
///
/// Useful shorthands for pointers, or maybe-null pointers
inline_for_extraction
type mpointer (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{length b == 1}
inline_for_extraction
type mpointer_or_null (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{if g_is_null b then True else length b == 1}
unfold
let deref (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (x:mpointer a rrel rel) =
get h x 0
/// Two pointers having different contents are disjoint. This is valid
/// only for pointers, i.e. buffers of size 1.
val pointer_distinct_sel_disjoint
(#a:Type0) (#rrel1 #rrel2 #rel1 #rel2:srel a)
(b1:mpointer a rrel1 rel1)
(b2:mpointer a rrel2 rel2)
(h:HS.mem)
:Lemma (requires (live h b1 /\ live h b2 /\ get h b1 0 =!= get h b2 0))
(ensures (disjoint b1 b2))
/// The following stateful operations on buffers do not change the
/// memory, but, as required by the C standard, they all require the
/// buffer in question to be live.
/// The nullity test, ``is_null b``, which KaRaMeL compiles to C as ``b == NULL``.
val is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack bool (requires (fun h -> live h b))
(ensures (fun h y h' -> h == h' /\ y == g_is_null b))
/// ``sub b i len`` constructs the sub-buffer of ``b`` starting from
/// offset ``i`` with length ``len``. KaRaMeL extracts this operation as
/// ``b + i`` (or, equivalently, ``&b[i]``.)
val msub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t) (len:Ghost.erased U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i + U32.v (Ghost.reveal len) <= length b /\ compatible_sub b i (Ghost.reveal len) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (Ghost.reveal len)))
/// ``offset b i`` constructs the tail of the buffer ``b`` starting from
/// offset ``i``, i.e. the sub-buffer of ``b`` starting from offset ``i``
/// with length ``U32.sub (len b) i``. KaRaMeL compiles it as ``b + i`` or
/// ``&b[i]``.
///
/// This stateful operation cannot be derived from ``sub``, because the
/// length cannot be computed outside of proofs.
val moffset (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i <= length b /\ compatible_sub b i (U32.sub (len b) i) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (U32.sub (len b) i)))
// goffset
/// ``index b i`` reads the value of ``b`` at offset ``i`` from memory and
/// returns it. KaRaMeL compiles it as b[i].
val index (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (i:U32.t)
:HST.Stack a (requires (fun h -> live h b /\ U32.v i < length b))
(ensures (fun h y h' -> h == h' /\ y == Seq.index (as_seq h b) (U32.v i)))
/// The following stateful operations on buffers modify the memory,
/// and, as usual, require the liveness of the buffer.
/// ``g_upd_seq b s h`` updates the entire buffer `b`'s contents in
/// heap `h` to correspond to the sequence `s`
val g_upd_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (s:Seq.lseq a (length b))
(h:HS.mem{live h b})
:GTot HS.mem
val lemma_g_upd_with_same_seq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b)) (ensures (g_upd_seq b (as_seq h b) h == h))
/// A lemma specifying `g_upd_seq` in terms of its effect on the
/// buffer's underlying sequence
val g_upd_seq_as_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(s:Seq.lseq a (length b))
(h:HS.mem{live h b})
: Lemma (let h' = g_upd_seq b s h in
(Seq.length s > 0 ==> not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
HST.equal_domains h h' /\
as_seq h' b == s)
/// ``g_upd b i v h`` updates the buffer `b` in heap `h` at location
/// `i` writing ``v`` there. This is the spec analog of the stateful
/// update `upd` below.
let g_upd (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: GTot HS.mem
= g_upd_seq b (Seq.upd (as_seq h b) i v) h
val g_upd_modifies_strong (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: Lemma (modifies (loc_buffer_from_to b (U32.uint_to_t i) (U32.uint_to_t (i + 1))) h (g_upd b i v h))
/// ``upd b i v`` writes ``v`` to the memory, at offset ``i`` of
/// buffer ``b``. KaRaMeL compiles it as ``b[i] = v``.
val upd'
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
:HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> h' == g_upd b (U32.v i) v h))
inline_for_extraction
let upd
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
: HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> (not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
as_seq h' b == Seq.upd (as_seq h b) (U32.v i) v))
= let h = HST.get () in
upd' b i v;
g_upd_seq_as_seq b (Seq.upd (as_seq h b) (U32.v i) v) h
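(* A read-modify-write sketch (hypothetical client code, not part of this
   interface), for a buffer b with the trivial preorder, length at least 1,
   and f a hypothetical pure function on the element type:

     let x = index b 0ul in
     upd b 0ul (f x)

   With h0 and h1 the surrounding states, the postconditions give
   modifies (loc_buffer b) h0 h1 and
   as_seq h1 b == Seq.upd (as_seq h0 b) 0 (f x). *)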
(* FIXME: Comment on `recall` *)
val recallable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val region_lifetime_buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Type0
(*
* A functoriality lemma
*)
unfold
let rrel_rel_always_compatible (#a:Type0) (rrel rel:srel a) =
forall (len:nat) (i:nat) (j:nat{i <= j /\ j <= len}). compatible_subseq_preorder len rrel i j rel
val region_lifetime_sub (#a:Type0) (#rrel #rel #subrel:srel a)
(b0:mbuffer a rrel rel)
(b1:mbuffer a rrel subrel)
: Lemma
(requires rrel_rel_always_compatible rrel subrel)
(ensures
(region_lifetime_buf b0 /\
(exists i len. U32.v i + U32.v len <= length b0 /\ b1 == mgsub subrel b0 i len)) ==> region_lifetime_buf b1)
val recallable_null (#a:Type0) (#rrel #rel:srel a)
:Lemma (recallable (mnull #a #rrel #rel)) [SMTPat (recallable (mnull #a #rrel #rel))]
(*
val recallable_includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (recallable larger <==> recallable smaller))
[SMTPatOr [
[SMTPat (recallable larger); SMTPat (recallable smaller);];
[SMTPat (larger `includes` smaller)];
]]
*)
val recallable_mgsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel /\ recallable b))
(ensures (recallable (mgsub sub_rel b i len)))
[SMTPatOr [
[SMTPat (recallable (mgsub sub_rel b i len))];
[SMTPat (recallable b); SMTPat (mgsub sub_rel b i len);]
]]
val recall (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack unit (requires (fun m -> recallable b \/ (region_lifetime_buf b /\ HS.live_region m (frameOf b))))
(ensures (fun m0 _ m1 -> m0 == m1 /\ live m1 b))
(*
* Begin: API for general witness and recall
* Clients can witness predicates on the contents of the buffer, and later recall them
* Provided the predicates are stable w.r.t. the buffer preorder
*)
(* Shorthand for predicates of Seq.seq a *)
unfold let spred (a:Type0) = Seq.seq a -> Type0
(*
 * Note the tight patterns on the quantifier; you may need to write additional triggers
 * if you are working with them directly.
*)
unfold let stable_on (#a:Type0) (p:spred a) (rel:srel a) =
forall (s1 s2:Seq.seq a).{:pattern (p s1); (rel s1 s2); (p s2)} (p s1 /\ rel s1 s2) ==> p s2
(* Clients get this pure token when they witness a predicate *)
val witnessed (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a) :Type0
(*
* We can only support witness and recall for gc-malloced buffers (i.e. recallable ones)
* This is not a fundamental limitation, but needs some tweaks to the underlying state model
*)
val witness_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.ST unit (requires (fun h0 -> p (as_seq h0 b) /\ p `stable_on` rel))
(ensures (fun h0 _ h1 -> h0 == h1 /\ b `witnessed` p))
val recall_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.ST unit (requires (fun h0 -> (recallable b \/ live h0 b) /\ b `witnessed` p))
(ensures (fun h0 _ h1 -> h0 == h1 /\ live h0 b /\ p (as_seq h0 b)))
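(* A witnessing sketch (hypothetical client code, not part of this
   interface): for a buffer of U32.t whose preorder rel only lets the first
   element grow, a lower bound is stable and can be witnessed once and
   recalled later:

     let p : spred U32.t = fun s -> Seq.length s > 0 /\ U32.v (Seq.index s 0) >= 42 in
     witness_p b p;   (* requires p (as_seq h b) and p `stable_on` rel *)
     ...
     recall_p b p     (* re-establishes p (as_seq h b) in the current state *)
   *)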
val witnessed_functorial (#a:Type0)
(#rrel #rel1 #rel2:srel a)
(b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t)
(s1 s2:spred a)
: Lemma
(requires
rrel_rel_always_compatible rrel rel1 /\ //e.g. trivial_preorder, immutable preorder etc.
U32.v i + U32.v len <= length b1 /\
b2 == mgsub rel2 b1 i len /\ //the underlying allocation unit for b1 and b2 must be the same
witnessed b1 s1 /\
(forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2)))
(ensures witnessed b2 s2)
(*
* A stateful version that relaxes the rrel and rel compatibility
* but requires liveness of b1
*)
val witnessed_functorial_st (#a:Type0)
(#rrel #rel1 #rel2:srel a)
(b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t)
(s1 s2:spred a)
: HST.Stack unit
(requires fun h ->
live h b1 /\
U32.v i + U32.v len <= length b1 /\
b2 == mgsub rel2 b1 i len /\
witnessed b1 s1 /\
(forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2)))
(ensures fun h0 _ h1 -> h0 == h1 /\ witnessed b2 s2)
(* End: API for general witness and recall *)
/// Deallocation. A buffer that was allocated by ``malloc`` (see below)
/// can be ``free`` d.
val freeable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val free (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.ST unit (requires (fun h0 -> live h0 b /\ freeable b))
(ensures (fun h0 _ h1 -> (not (g_is_null b)) /\
Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\
(HS.get_tip h1) == (HS.get_tip h0) /\
modifies (loc_addr_of_buffer b) h0 h1 /\
HS.live_region h1 (frameOf b)))
val freeable_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (freeable b)) (ensures (length b > 0))
[SMTPat (freeable b)]
val freeable_disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (frameOf b1 <> frameOf b2 \/ as_addr b1 <> as_addr b2))
let freeable_disjoint' (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (loc_disjoint (loc_addr_of_buffer b1) (loc_addr_of_buffer b2)))
[SMTPat (freeable b1); SMTPat (disjoint b1 b2)]
= freeable_disjoint b1 b2
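(* A deallocation sketch (hypothetical client code, not part of this
   interface; the allocator name stands for one of the malloc-style
   operators declared later in this file): only such heap allocations are
   freeable, and the footprint of free is the whole allocation unit:

     let b = mmalloc HS.root 0ul 8ul in   (* freeable b is expected to hold *)
     ...
     free b                               (* modifies (loc_addr_of_buffer b) *)
   *)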
(***** Begin allocation functions *****)
/// Allocation. This is the common postcondition of all allocation
/// operators: it states that the resulting buffer is fresh, and
/// specifies its initial contents.
(*
* Allocation functions:
* In the return type, we try to give heap-independent postconditions (such as length)
* in the refinement of the buffer type (for the usage pattern of top-level buffers)
* while heap dependent postconditions are provided in the ensures clause
*
* One unsatisfying aspect is that these functions are duplicated in the wrappers that we write
* (e.g. Buffer, ImmutableBuffer, etc.)
* If we don't duplicate, then the clients may face type inference issues (for preorders)
*
* So, if you change any of the pre- or postcondition, you should change the pre and post spec functions
* (such as alloc_post_mem_common etc.), rather than the specs directly
* Perhaps we can rely on F* type inference and not write specs explicitly in those wrappers?
* Will try that
*
* For memory dependent post, alloc_post_mem_common is the one used by everyone
*
* For heap allocations, the library also provides partial functions that could return null
* Clients need to explicitly check for non-null values when using these functions
* Partial function specs use alloc_partial_post_mem_common
*
* NOTE: a useful test for the implementation of partial functions is that
* their spec should be valid even when their implementation just returns null
*) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
a: Type0 ->
rrel: LowStar.Monotonic.Buffer.srel a ->
rel: LowStar.Monotonic.Buffer.srel a ->
len: Prims.nat
-> Type0 | Prims.Tot | [
"total"
] | [] | [
"LowStar.Monotonic.Buffer.srel",
"Prims.nat",
"LowStar.Monotonic.Buffer.mbuffer",
"Prims.l_and",
"Prims.eq2",
"LowStar.Monotonic.Buffer.length",
"Prims.b2t",
"Prims.op_Negation",
"LowStar.Monotonic.Buffer.g_is_null"
] | [] | false | false | false | true | true | let lmbuffer (a: Type0) (rrel rel: srel a) (len: nat) =
| b: mbuffer a rrel rel {length b == len /\ not (g_is_null b)} | false |
|
LowStar.Monotonic.Buffer.fsti | LowStar.Monotonic.Buffer.modifies_liveness_insensitive_region_weak | val modifies_liveness_insensitive_region_weak (l2: loc) (h h': HS.mem) (x: HS.rid)
: Lemma
(requires
(modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h x))
(ensures (HS.live_region h' x))
[
SMTPatOr
[
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)]
]
] | val modifies_liveness_insensitive_region_weak (l2: loc) (h h': HS.mem) (x: HS.rid)
: Lemma
(requires
(modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h x))
(ensures (HS.live_region h' x))
[
SMTPatOr
[
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)]
]
] | let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 57,
"end_line": 1258,
"start_col": 0,
"start_line": 1247
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
 * We expect that clients will rarely work with this directly.
 * Most of the time, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq h b``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder ``sub_rel`` to the sub-buffer,
/// provided it is compatible with the parent's preorder (see ``compatible_sub``)
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
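(* Illustrative sketch, kept in a comment so as not to extend this
   interface: a client holding a buffer of length at least 8 can name its
   second half in ghost code, reusing the buffer's own preorder for the
   sub-buffer.

   let snd_half (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
     : Ghost (mbuffer a rrel rel)
         (requires (length b >= 8))
         (ensures (fun _ -> True))
     = mgsub rel b 4ul 4ul
*)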
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
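(* Illustrative sketch (comment only): thanks to the SMT pattern above,
   nested carvings collapse without an explicit lemma call in client
   proofs; the offsets simply add up.

   let nested_gsub_collapses (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
     : Lemma (requires (length b >= 16))
             (ensures (mgsub rel (mgsub rel b 4ul 12ul) 2ul 8ul ==
                       mgsub rel b (4ul `U32.add` 2ul) 8ul))
     = ()
*)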
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer are the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live buffers (not both null) with the same region and address have
/// elements of the same type and the same initial preorder.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
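(* Illustrative sketch (comment only): the unit, idempotence and
   associativity laws above can be chained to simplify unions of
   locations appearing in modifies clauses.

   let simplify_union (l1 l2:loc)
     : Lemma (loc_union (loc_union l1 loc_none) (loc_union l1 l2) == loc_union l1 l2)
     = loc_union_loc_none_r l1;
       loc_union_idem_1 l1 l2
*)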
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
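(* For example (a sketch; the exact idiom depends on the client code):
   after ``HST.push_frame ()``, the new stack frame and any frame nested
   below it are covered by a clause such as
   ``modifies (loc_all_regions_from false (HS.get_tip h1)) h1 h2``,
   whereas ``loc_region_only`` would cover the tip region alone. *)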
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
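(* Illustrative sketch (comment only): a location is always included in a
   union containing it, e.g. a buffer's location within a larger footprint.

   let union_includes_buffer (l:loc) (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
     : Lemma (loc_includes (loc_union l (loc_buffer b)) (loc_buffer b))
     = loc_includes_refl (loc_buffer b);
       loc_includes_union_l l (loc_buffer b) (loc_buffer b)
*)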
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then the set of memory
/// locations of ``b1`` includes that of ``b2``.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region ``r`` of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then the memory locations corresponding to ``s1``
/// include those corresponding to ``s2``.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then the memory locations corresponding to ``s1`` include those of ``s2``
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
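(* Illustrative sketch (comment only): non-overlapping carvings of the same
   buffer yield disjoint locations, discharged by the SMT pattern above.

   let disjoint_halves (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
     : Lemma (requires (length b >= 8))
             (ensures (loc_disjoint (loc_buffer (mgsub rel b 0ul 4ul))
                                    (loc_buffer (mgsub rel b 4ul 4ul))))
     = ()
*)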
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
 * Same as all_disjoint, retained for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
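(* For instance, a specification over three buffers ``b1``, ``b2`` and ``b3``
   (a sketch of client code, not part of this interface) may state its
   precondition as

     all_live h [buf b1; buf b2; buf b3] /\
     all_disjoint [loc_buffer b1; loc_buffer b2; loc_buffer b3]

   which the typechecker reduces to the expected conjunction of ``live``
   and pairwise ``loc_disjoint`` facts. *)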
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
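(* A typical stateful signature built from these pieces (a hypothetical,
   simplified sketch; the actual update operation of this module appears
   further below, with its preorder side conditions):

   val upd_sketch (#a:Type0) (#rrel #rel:srel a)
     (b:mbuffer a rrel rel) (i:U32.t) (v:a)
     : HST.Stack unit
       (requires (fun h -> live h b /\ U32.v i < length b))
       (ensures  (fun h0 _ h1 -> modifies (loc_buffer b) h0 h1 /\ live h1 b))

   Any location disjoint from ``loc_buffer b`` is then preserved across a
   call, via the elimination lemmas below. *)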
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
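(* Illustrative sketch (comment only): the elimination lemma above gives the
   usual framing argument when a write is confined to a disjoint buffer.

   let framing (#a:Type0) (#rrel #rel:srel a)
     (b1 b2:mbuffer a rrel rel) (h h':HS.mem)
     : Lemma (requires (loc_disjoint (loc_buffer b1) (loc_buffer b2) /\
                        live h b2 /\ modifies (loc_buffer b1) h h'))
             (ensures (live h' b2 /\ as_seq h b2 == as_seq h' b2))
     = modifies_buffer_elim b2 (loc_buffer b1) h h'
*)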
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
/// If the memory state does not change, then any set of memory locations is
/// trivially modified (in particular, the empty set ``loc_none``).
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
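(* Illustrative sketch (comment only): writing to a buffer never deallocates
   anything, so the liveness of an arbitrary buffer ``b'`` survives a
   ``modifies (loc_buffer b)`` step, with no disjointness hypothesis needed.

   let liveness_survives_write (#a:Type0) (#rrel #rel:srel a)
     (b b':mbuffer a rrel rel) (h h':HS.mem)
     : Lemma (requires (modifies (loc_buffer b) h h' /\ live h b'))
             (ensures (live h' b'))
     = address_liveness_insensitive_buffer b;
       modifies_liveness_insensitive_buffer_weak (loc_buffer b) h h' b'
*)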
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]] | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
l2: LowStar.Monotonic.Buffer.loc ->
h: FStar.Monotonic.HyperStack.mem ->
h': FStar.Monotonic.HyperStack.mem ->
x: FStar.Monotonic.HyperHeap.rid
-> FStar.Pervasives.Lemma
(requires
LowStar.Monotonic.Buffer.modifies l2 h h' /\
LowStar.Monotonic.Buffer.loc_includes LowStar.Monotonic.Buffer.region_liveness_insensitive_locs
l2 /\ FStar.Monotonic.HyperStack.live_region h x)
(ensures FStar.Monotonic.HyperStack.live_region h' x)
[
SMTPatOr [
[
SMTPat (LowStar.Monotonic.Buffer.modifies l2 h h');
SMTPat (FStar.Monotonic.HyperStack.live_region h x)
];
[
SMTPat (LowStar.Monotonic.Buffer.modifies l2 h h');
SMTPat (FStar.Monotonic.HyperStack.live_region h' x)
]
]
] | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowStar.Monotonic.Buffer.loc",
"FStar.Monotonic.HyperStack.mem",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.modifies_liveness_insensitive_region",
"LowStar.Monotonic.Buffer.loc_none",
"Prims.unit",
"Prims.l_and",
"LowStar.Monotonic.Buffer.modifies",
"LowStar.Monotonic.Buffer.loc_includes",
"LowStar.Monotonic.Buffer.region_liveness_insensitive_locs",
"Prims.b2t",
"FStar.Monotonic.HyperStack.live_region",
"Prims.squash",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat_or",
"Prims.list",
"FStar.Pervasives.smt_pat",
"Prims.bool",
"Prims.Nil"
] | [] | true | false | true | false | false | let modifies_liveness_insensitive_region_weak (l2: loc) (h h': HS.mem) (x: HS.rid)
: Lemma
(requires
(modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h x))
(ensures (HS.live_region h' x))
[
SMTPatOr
[
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)]
]
] =
| modifies_liveness_insensitive_region loc_none l2 h h' x | false |
LowStar.Monotonic.Buffer.fsti | LowStar.Monotonic.Buffer.modifies_liveness_insensitive_region_buffer_weak | val modifies_liveness_insensitive_region_buffer_weak
(l2: loc)
(h h': HS.mem)
(#a: Type0)
(#rrel #rel: srel a)
(x: mbuffer a rrel rel)
: Lemma
(requires
(modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[
SMTPatOr
[
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))]
]
] | val modifies_liveness_insensitive_region_buffer_weak
(l2: loc)
(h h': HS.mem)
(#a: Type0)
(#rrel #rel: srel a)
(x: mbuffer a rrel rel)
: Lemma
(requires
(modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[
SMTPatOr
[
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))]
]
] | let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 66,
"end_line": 1289,
"start_col": 0,
"start_line": 1276
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2)))  //if we replace the slice [i, j) in s by s2, then s and the resulting sequence are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq h b``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` have the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder ``sub_rel`` to the sub-buffer,
/// provided it is compatible with the parent's preorder (see ``compatible_sub``)
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer are the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live buffers (not both null) with the same region and address have
/// elements of the same type and the same initial preorder.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
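(* Illustrative sketch (added for exposition, not part of the original
   interface; the name `example_union_collapse` is ours): the monoid laws
   above let us collapse repeated unions explicitly when needed. *)
let example_union_collapse (l1 l2: loc)
  : Lemma (loc_union (loc_union l1 l2) (loc_union l1 l2) == loc_union l1 l2)
  = loc_union_idem (loc_union l1 l2)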
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
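(* Illustrative sketch (ours, for exposition): the footprint of a compound
   structure is typically described as the union of the locations of its
   components, e.g. two buffers. *)
let example_footprint (#a:Type0) (#rrel #rel:srel a)
  (b1 b2: mbuffer a rrel rel)
  : GTot loc
  = loc_union (loc_buffer b1) (loc_buffer b2)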
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
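(* Illustrative sketch (ours, for exposition): a footprint that includes two
   locations separately also includes their union, by `loc_includes_union_r`. *)
let example_includes_union (s l1 l2: loc)
  : Lemma (requires (loc_includes s l1 /\ loc_includes s l2))
          (ensures  (loc_includes s (loc_union l1 l2)))
  = loc_includes_union_r s l1 l2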
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then the set of memory
/// locations of ``b1`` includes that of ``b2``.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
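(* Illustrative sketch (ours, for exposition): any footprint covering a whole
   buffer also covers any of its sub-ranges, by chaining the two lemmas above
   with transitivity. *)
let example_includes_sub_range (#a:Type0) (#rrel #rel:srel a)
  (l: loc) (b: mbuffer a rrel rel) (from to: U32.t)
  : Lemma (requires (loc_includes l (loc_buffer b)))
          (ensures  (loc_includes l (loc_buffer_from_to b from to)))
  = loc_includes_loc_buffer_loc_buffer_from_to b from to;
    loc_includes_trans l (loc_buffer b) (loc_buffer_from_to b from to)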
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then the corresponding sets of memory locations
/// are also related by inclusion.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then the corresponding sets of memory locations are also related by
/// inclusion.
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
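(* Illustrative sketch (ours, for exposition): disjointness is inherited by
   included locations, so a location disjoint from a buffer is also disjoint
   from any of its sub-buffers. *)
let example_disjoint_sub (#a:Type0) (#rrel #rel:srel a)
  (l: loc) (b: mbuffer a rrel rel) (i n: U32.t) (sub_rel: srel a)
  : Lemma (requires (U32.v i + U32.v n <= length b /\ loc_disjoint l (loc_buffer b)))
          (ensures  (loc_disjoint l (loc_buffer (mgsub sub_rel b i n))))
  = loc_includes_gsub_buffer_r' b i n sub_rel;
    loc_disjoint_includes_r l (loc_buffer b) (loc_buffer (mgsub sub_rel b i n))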
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
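(* Illustrative sketch (ours, for exposition): splitting a buffer at an index
   `mid` yields two range locations that are disjoint, since the first range
   ends exactly where the second one starts. *)
let example_split_disjoint (#a:Type0) (#rrel #rel:srel a)
  (b: mbuffer a rrel rel) (mid: U32.t)
  : Lemma (loc_disjoint (loc_buffer_from_to b 0ul mid) (loc_buffer_from_to b mid (len b)))
  = loc_disjoint_loc_buffer_from_to b 0ul mid mid (len b)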
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
 * Same as all_disjoint, retained for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
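(* Illustrative sketch (ours, for exposition): a typical precondition for a
   function taking three buffers, stated with the list-based helper above. *)
let example_footprint_pre (#a:Type0) (#rrel #rel:srel a)
  (b1 b2 b3: mbuffer a rrel rel)
  : GTot Type0
  = loc_pairwise_disjoint [loc_buffer b1; loc_buffer b2; loc_buffer b3]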
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if,
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
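(* Illustrative sketch (ours, for exposition): the standard framing argument.
   A live buffer disjoint from the modified footprint keeps both its liveness
   and its contents. *)
let example_frame (#a:Type0) (#rrel #rel:srel a)
  (b: mbuffer a rrel rel) (p: loc) (h h': HS.mem)
  : Lemma (requires (live h b /\ loc_disjoint (loc_buffer b) p /\ modifies p h h'))
          (ensures  (live h' b /\ as_seq h' b == as_seq h b))
  = modifies_buffer_elim b p h h'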
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
/// If the memory state does not change, then the modifies clause holds for
/// any set of memory locations (and, in particular, for the empty set,
/// ``loc_none``).
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
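(* Illustrative sketch (ours, for exposition): weakening a modifies clause
   from a single buffer to the union of two buffers, via inclusion. *)
let example_weaken_modifies (#a:Type0) (#rrel #rel:srel a)
  (b1 b2: mbuffer a rrel rel) (h h': HS.mem)
  : Lemma (requires (modifies (loc_buffer b1) h h'))
          (ensures  (modifies (loc_union (loc_buffer b1) (loc_buffer b2)) h h'))
  = loc_includes_refl (loc_buffer b1);
    loc_includes_union_l (loc_buffer b1) (loc_buffer b2) (loc_buffer b1);
    modifies_loc_includes (loc_union (loc_buffer b1) (loc_buffer b2)) h h' (loc_buffer b1)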
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
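(* Illustrative sketch (ours, for exposition): writing only to the contents of
   some buffer `b'` (an address-liveness-insensitive footprint) cannot revoke
   the liveness of any other live buffer `b`, even without a disjointness
   hypothesis. *)
let example_contents_write_preserves_liveness (#a:Type0) (#rrel #rel:srel a)
  (b b': mbuffer a rrel rel) (h h': HS.mem)
  : Lemma (requires (modifies (loc_buffer b') h h' /\ live h b))
          (ensures  (live h' b))
  = address_liveness_insensitive_buffer b';
    modifies_liveness_insensitive_buffer_weak (loc_buffer b') h h' b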
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
l2: LowStar.Monotonic.Buffer.loc ->
h: FStar.Monotonic.HyperStack.mem ->
h': FStar.Monotonic.HyperStack.mem ->
x: LowStar.Monotonic.Buffer.mbuffer a rrel rel
-> FStar.Pervasives.Lemma
(requires
LowStar.Monotonic.Buffer.modifies l2 h h' /\
LowStar.Monotonic.Buffer.loc_includes LowStar.Monotonic.Buffer.region_liveness_insensitive_locs
l2 /\ FStar.Monotonic.HyperStack.live_region h (LowStar.Monotonic.Buffer.frameOf x))
(ensures FStar.Monotonic.HyperStack.live_region h' (LowStar.Monotonic.Buffer.frameOf x))
[
SMTPatOr [
[
SMTPat (LowStar.Monotonic.Buffer.modifies l2 h h');
SMTPat (FStar.Monotonic.HyperStack.live_region h (LowStar.Monotonic.Buffer.frameOf x))
];
[
SMTPat (LowStar.Monotonic.Buffer.modifies l2 h h');
SMTPat (FStar.Monotonic.HyperStack.live_region h' (LowStar.Monotonic.Buffer.frameOf x)
)
]
]
] | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowStar.Monotonic.Buffer.loc",
"FStar.Monotonic.HyperStack.mem",
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Monotonic.Buffer.modifies_liveness_insensitive_region_buffer",
"LowStar.Monotonic.Buffer.loc_none",
"Prims.unit",
"Prims.l_and",
"LowStar.Monotonic.Buffer.modifies",
"LowStar.Monotonic.Buffer.loc_includes",
"LowStar.Monotonic.Buffer.region_liveness_insensitive_locs",
"Prims.b2t",
"FStar.Monotonic.HyperStack.live_region",
"LowStar.Monotonic.Buffer.frameOf",
"Prims.squash",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat_or",
"Prims.list",
"FStar.Pervasives.smt_pat",
"Prims.bool",
"Prims.Nil"
] | [] | true | false | true | false | false | let modifies_liveness_insensitive_region_buffer_weak
(l2: loc)
(h h': HS.mem)
(#a: Type0)
(#rrel #rel: srel a)
(x: mbuffer a rrel rel)
: Lemma
(requires
(modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[
SMTPatOr
[
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))]
]
] =
| modifies_liveness_insensitive_region_buffer loc_none l2 h h' x | false |
LowStar.Monotonic.Buffer.fsti | LowStar.Monotonic.Buffer.lmbuffer_or_null | val lmbuffer_or_null : a: Type0 ->
rrel: LowStar.Monotonic.Buffer.srel a ->
rel: LowStar.Monotonic.Buffer.srel a ->
len: Prims.nat ->
r: FStar.Monotonic.HyperHeap.rid
-> Type0 | let lmbuffer_or_null (a:Type0) (rrel rel:srel a) (len:nat) (r:HS.rid)
= b:mbuffer a rrel rel{(not (g_is_null b)) ==> (length b == len /\ frameOf b == r)} | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 85,
"end_line": 2102,
"start_col": 7,
"start_line": 2101
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
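(* Illustrative sketch (ours, for exposition; mirrors LowStar.Buffer): a
   "plain" buffer is an mbuffer whose two preorders are both trivial, i.e.
   they place no constraint on how the contents may evolve. *)
let example_trivial_preorder (a:Type0) : srel a = fun _ _ -> True
let example_plain_buffer (a:Type0) = mbuffer a (example_trivial_preorder a) (example_trivial_preorder a)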
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder to the sub-buffer (``sub_rel``),
/// provided it is compatible.
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
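(* Illustrative sketch (ours, for exposition): the (ghost) first half of a
   buffer, taken at an arbitrary sub-preorder. Only the bounds precondition of
   `mgsub` is needed to form the sub-buffer itself; compatibility matters when
   relating liveness and updates, as in the lemmas below. *)
let example_first_half (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
  (b: mbuffer a rrel rel)
  : GTot (mbuffer a rrel sub_rel)
  = mgsub sub_rel b 0ul (U32.div (len b) 2ul)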
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers having the same region and address have
/// elements of the same type (and the same initial preorder ``rrel``).
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
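(*
 * Example (illustrative sketch of hypothetical client code, not part of this
 * interface): the union of two buffer footprints includes each of its
 * operands. With the SMT patterns above this is usually discharged
 * automatically, but it can also be proved by an explicit call.
 *
 *   let union_includes_right (#a:Type0) (#rrel #rel:srel a)
 *     (b1 b2:mbuffer a rrel rel)
 *     : Lemma (loc_includes (loc_union (loc_buffer b1) (loc_buffer b2)) (loc_buffer b2))
 *     = loc_includes_union_l (loc_buffer b1) (loc_buffer b2) (loc_buffer b2)
 *)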
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then the corresponding sets of
/// memory locations are also related by inclusion.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then the corresponding sets of memory locations are
/// also related by inclusion.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``, then the
/// corresponding sets of memory locations are also related by inclusion
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
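(*
 * Example (illustrative sketch of hypothetical client code): splitting a
 * buffer at an index ``i`` yields two sub-buffers with disjoint footprints;
 * the lemma above proves it, and its SMT patterns usually fire automatically.
 *
 *   let split_disjoint (#a:Type0) (#rrel #rel:srel a)
 *     (b:mbuffer a rrel rel) (i:U32.t{U32.v i <= length b})
 *     : Lemma (loc_disjoint (loc_buffer (mgsub rel b 0ul i))
 *                           (loc_buffer (mgsub rel b i (len b `U32.sub` i))))
 *     = loc_disjoint_gsub_buffer b 0ul i rel i (len b `U32.sub` i) rel
 *)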
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
* Same as all_disjoint, retaining for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
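(*
 * Example (illustrative sketch): a typical precondition for a function taking
 * three buffers, written with the list-based helpers above. BigOps normalizes
 * it at typechecking time into the conjunction of liveness and pairwise
 * loc_disjoint facts.
 *
 *   let three_bufs_pre (#a:Type0) (#rrel #rel:srel a)
 *     (h:HS.mem) (b1 b2 b3:mbuffer a rrel rel) : Type0
 *     = all_live h [buf b1; buf b2; buf b3] /\
 *       all_disjoint [loc_buffer b1; loc_buffer b2; loc_buffer b3]
 *)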
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if,
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
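(*
 * Example (illustrative sketch of hypothetical client code): these elimination
 * lemmas are what make framing work in practice. If ``b2`` is disjoint from the
 * footprint that was modified, its liveness and contents are preserved.
 *
 *   let frame_example (#a:Type0) (#rrel #rel:srel a)
 *     (b1 b2:mbuffer a rrel rel) (h0 h1:HS.mem)
 *     : Lemma (requires (live h0 b2 /\
 *                        loc_disjoint (loc_buffer b2) (loc_buffer b1) /\
 *                        modifies (loc_buffer b1) h0 h1))
 *             (ensures  (live h1 b2 /\ as_seq h0 b2 == as_seq h1 b2))
 *     = modifies_buffer_elim b2 (loc_buffer b1) h0 h1
 *)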
/// If the memory state does not change, then any set of memory locations is
/// trivially modified (in particular, the empty set, ``loc_none``.)
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
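(*
 * Example (illustrative sketch): since any location includes loc_none, a step
 * that modifies loc_none (e.g. an allocation) stays within the
 * liveness-insensitive locations, and therefore preserves the liveness of
 * every existing buffer.
 *
 *   let live_after_noop (#a:Type0) (#rrel #rel:srel a)
 *     (b:mbuffer a rrel rel) (h0 h1:HS.mem)
 *     : Lemma (requires (live h0 b /\ modifies loc_none h0 h1))
 *             (ensures  (live h1 b))
 *     = modifies_liveness_insensitive_buffer_weak loc_none h0 h1 b
 *)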
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x
/// Modifies clauses are transitive. This lemma is the most general
/// one.
val modifies_trans
(s12: loc)
(h1 h2: HS.mem)
(s23: loc)
(h3: HS.mem)
: Lemma
(requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
(ensures (modifies (loc_union s12 s23) h1 h3))
let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
= modifies_trans l h1 h2 l_goal h3
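(*
 * Example (illustrative sketch): transitivity is what lets you compose the
 * modifies clauses of two consecutive steps that write to the same footprint.
 *
 *   let compose_modifies (l:loc) (h0 h1 h2:HS.mem)
 *     : Lemma (requires (modifies l h0 h1 /\ modifies l h1 h2))
 *             (ensures  (modifies l h0 h2))
 *     = modifies_trans_linear l l h0 h1 h2
 *)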
/// Regions that are not live can be removed from sets of memory
/// locations that are modified.
val modifies_only_live_regions
(rs: Set.set HS.rid)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_regions false rs) l) h h' /\
(forall r . Set.mem r rs ==> (~ (HS.live_region h r)))
))
(ensures (modifies l h h'))
/// As a consequence, fresh regions can be removed from modifies
/// clauses.
val no_upd_fresh_region: r:HS.rid -> l:loc -> h0:HS.mem -> h1:HS.mem -> Lemma
(requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1))
(ensures (modifies l h0 h1))
[SMTPat (HS.fresh_region r h0 h1); SMTPat (modifies l h0 h1)]
val new_region_modifies (m0: HS.mem) (r0: HS.rid) (col: option int) : Lemma
(requires (HST.is_eternal_region r0 /\ HS.live_region m0 r0 /\ (None? col \/ HS.is_heap_color (Some?.v col))))
(ensures (
let (_, m1) = HS.new_eternal_region m0 r0 col in
modifies loc_none m0 m1
))
[SMTPat (HS.new_eternal_region m0 r0 col)]
/// Stack discipline: any stack frame (and all its transitively
/// extending regions) that is pushed, modified and popped can be
/// removed from a modifies clause.
/// AR: 01/29/2019: Removing the smt pattern from this lemma.
/// Clients are no longer expected to call it explicitly;
/// if you find yourself having to, please raise an issue.
val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
))
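(*
 * Example (illustrative sketch of hypothetical client code): the usual
 * push_frame/pop_frame pattern. Writes to stack allocations made inside the
 * frame disappear from the caller-visible modifies clause once the frame is
 * popped, thanks to the lemmas above and modifies_remove_fresh_frame below.
 *
 *   let with_frame () : HST.Stack unit
 *     (requires (fun _ -> True))
 *     (ensures  (fun h0 _ h1 -> modifies loc_none h0 h1))
 *   = HST.push_frame ();
 *     (* ... allocate and mutate stack buffers here ... *)
 *     HST.pop_frame ()
 *)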
/// Compatibility lemmas to rescue modifies clauses specified in the
/// standard F* HyperStack library.
val modifies_loc_regions_intro
(rs: Set.set HS.rid)
(h1 h2: HS.mem)
: Lemma
(requires (HS.modifies rs h1 h2))
(ensures (modifies (loc_regions true rs) h1 h2))
val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2))
/// Modifies clauses for allocating a reference: nothing is
/// modified. (In particular, a modifies clause does not track
/// memory locations that are created.)
val modifies_ralloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(i: HS.rid)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel)
(h' : HS.mem)
: Lemma
(requires (HST.ralloc_post i init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.ralloc_post i init h x h')]
val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')]
/// Modifies clause for freeing a reference: the address is modified.
val modifies_free
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel { HS.is_mm r } )
(m: HS.mem { m `HS.contains` r } )
: Lemma
(modifies (loc_freed_mreference r) m (HS.free r m))
[SMTPat (HS.free r m)]
/// Another compatibility lemma
val modifies_none_modifies
(h1 h2: HS.mem)
: Lemma
(requires (HST.modifies_none h1 h2))
(ensures (modifies loc_none h1 h2))
[SMTPat (HST.modifies_none h1 h2)]
/// Compatibility with HS.upd
val modifies_upd
(#t: Type) (#pre: Preorder.preorder t)
(r: HS.mreference t pre)
(v: t)
(h: HS.mem)
: Lemma
(requires (HS.contains h r))
(ensures (modifies (loc_mreference r) h (HS.upd h r v)))
[SMTPat (HS.upd h r v)]
/// Introduction lemma for modifying loc_buffer_from_to
val modifies_loc_buffer_from_to_intro
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
/// A memory ``h`` does not contain address ``a`` in region ``r``, denoted
/// ``does_not_contain_addr h (r, a)``, only if, either region ``r`` is
/// not live, or address ``a`` is unused in region ``r``.
(* BEGIN TODO: move to FStar.Monotonic.HyperStack *)
val does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: GTot Type0
val not_live_region_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (~ (HS.live_region h (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val unused_in_does_not_contain_addr
(h: HS.mem)
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r)))
val addr_unused_in_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val free_does_not_contain_addr
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
HS.is_mm r /\
m `HS.contains` r /\
fst x == HS.frameOf r /\
snd x == HS.as_addr r
))
(ensures (
HS.free r m `does_not_contain_addr` x
))
[SMTPat (HS.free r m `does_not_contain_addr` x)]
val does_not_contain_addr_elim
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
m `does_not_contain_addr` x /\
HS.frameOf r == fst x /\
HS.as_addr r == snd x
))
(ensures (~ (m `HS.contains` r)))
(** END TODO *)
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_live_addresses
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_addresses false r a) l) h h' /\
(forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x))
))
(ensures (modifies l h h'))
(* Generic way to ensure that a buffer just allocated is disjoint from
any other object, however the latter's liveness is defined. *)
val loc_not_unused_in (h: HS.mem) : GTot loc
val loc_unused_in (h: HS.mem) : GTot loc
(* Shortcut notations with more handy names *)
let loc_in (l: loc) (h: HS.mem) =
loc_not_unused_in h `loc_includes` l
let loc_not_in (l: loc) (h: HS.mem) =
loc_unused_in h `loc_includes` l
val loc_regions_unused_in (h: HS.mem) (rs: Set.set HS.rid) : Lemma
(requires (forall r . Set.mem r rs ==> (~ (HS.live_region h r))))
(ensures (loc_unused_in h `loc_includes` loc_regions false rs))
val loc_unused_in_not_unused_in_disjoint (h: HS.mem) : Lemma
(loc_disjoint (loc_unused_in h) (loc_not_unused_in h))
val not_live_region_loc_not_unused_in_disjoint
(h0: HS.mem)
(r: HS.rid)
: Lemma
(requires (~ (HS.live_region h0 r)))
(ensures (loc_disjoint (loc_region_only false r) (loc_not_unused_in h0)))
let fresh_frame_loc_not_unused_in_disjoint
(h0 h1: HS.mem)
: Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)]
= not_live_region_loc_not_unused_in_disjoint h0 (HS.get_tip h1)
val live_loc_not_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b))
(ensures (loc_not_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (live h b)]
val unused_in_loc_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (unused_in b h))
(ensures (loc_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (unused_in b h)]
val modifies_address_liveness_insensitive_unused_in
(h h' : HS.mem)
: Lemma
(requires (modifies (address_liveness_insensitive_locs) h h'))
(ensures (loc_not_unused_in h' `loc_includes` loc_not_unused_in h /\ loc_unused_in h `loc_includes` loc_unused_in h'))
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_not_unused_in
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (modifies (loc_union (loc_unused_in h) l) h h'))
(ensures (modifies l h h'))
val mreference_live_loc_not_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (h `HS.contains` r))
(ensures (loc_not_unused_in h `loc_includes` loc_freed_mreference r /\ loc_not_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.contains h r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_freed_mreference r)];
]]
val mreference_unused_in_loc_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (loc_unused_in h `loc_includes` loc_freed_mreference r /\ loc_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.unused_in r h)];
[SMTPat (loc_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_unused_in h `loc_includes` loc_freed_mreference r)];
]]
let unused_in_not_unused_in_disjoint_2
(l1 l2 l1' l2': loc)
(h: HS.mem)
: Lemma
(requires (loc_unused_in h `loc_includes` l1 /\ loc_not_unused_in h `loc_includes` l2 /\ l1 `loc_includes` l1' /\ l2 `loc_includes` l2' ))
(ensures (loc_disjoint l1' l2' ))
[SMTPat (loc_disjoint l1' l2'); SMTPat (loc_unused_in h `loc_includes` l1); SMTPat (loc_not_unused_in h `loc_includes` l2)]
= loc_includes_trans (loc_unused_in h) l1 l1' ;
loc_includes_trans (loc_not_unused_in h) l2 l2' ;
loc_unused_in_not_unused_in_disjoint h ;
loc_disjoint_includes (loc_unused_in h) (loc_not_unused_in h) l1' l2'
val modifies_loc_unused_in
(l: loc)
(h1 h2: HS.mem)
(l' : loc)
: Lemma
(requires (
modifies l h1 h2 /\
address_liveness_insensitive_locs `loc_includes` l /\
loc_unused_in h2 `loc_includes` l'
))
(ensures (loc_unused_in h1 `loc_includes` l'))
[SMTPatOr [
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h2 `loc_includes` l')];
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h1 `loc_includes` l')];
]]
/// Shorthand: freshness
let fresh_loc (l: loc) (h h' : HS.mem) : GTot Type0 =
loc_unused_in h `loc_includes` l /\
loc_not_unused_in h' `loc_includes` l
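(*
 * Example (illustrative sketch): freshness is what gives "new allocations are
 * disjoint from everything older". If ``l`` is fresh between h0 and h1 while
 * ``l'`` was already allocated in h0, the two are disjoint.
 *
 *   let fresh_disjoint (l l':loc) (h0 h1:HS.mem)
 *     : Lemma (requires (fresh_loc l h0 h1 /\ l' `loc_in` h0))
 *             (ensures  (loc_disjoint l l'))
 *     = unused_in_not_unused_in_disjoint_2 l l' l l' h0
 *)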
let ralloc_post_fresh_loc (#a:Type) (#rel:Preorder.preorder a) (i: HS.rid) (init:a) (m0: HS.mem)
(x: HST.mreference a rel{HST.is_eternal_region (HS.frameOf x)}) (m1: HS.mem) : Lemma
(requires (HST.ralloc_post i init m0 x m1))
(ensures (fresh_loc (loc_freed_mreference x) m0 m1))
[SMTPat (HST.ralloc_post i init m0 x m1)]
= ()
//AR: this is needed for liveness across fresh_frame
val fresh_frame_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (modifies loc_none h0 h1))
[SMTPat (HS.fresh_frame h0 h1)]
val popped_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.popped h0 h1))
(ensures (modifies (loc_region_only false (HS.get_tip h0)) h0 h1))
[SMTPat (HS.popped h0 h1)]
val modifies_remove_new_locs (l_fresh l_aux l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (fresh_loc l_fresh h1 h2 /\
modifies l_aux h1 h2 /\
l_goal `loc_includes` l_aux /\
modifies (loc_union l_fresh l_goal) h2 h3))
(ensures (modifies l_goal h1 h3))
[SMTPat (fresh_loc l_fresh h1 h2);
SMTPat (modifies l_aux h1 h2);
SMTPat (modifies l_goal h1 h3)]
(*
 * AR: this lemma frames the modifies clause across a fresh frame.
 * One way to do it would have been to reuse the lemma modifies_remove_new_locs,
 * treating the fresh frame as another new location.
 * However, the way the library is set up, loc_region in any form cannot be
 * considered a fresh loc, so we have a special lemma for fresh_frame.
*)
let modifies_remove_fresh_frame (h1 h2 h3:HS.mem) (l:loc)
: Lemma (requires (HS.fresh_frame h1 h2 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h2)) l) h2 h3))
(ensures (modifies l h1 h3))
[SMTPat (modifies l h1 h3); SMTPat (HS.fresh_frame h1 h2)]
= loc_regions_unused_in h1 (HS.mod_set (Set.singleton (HS.get_tip h2)));
modifies_only_not_unused_in l h1 h3
/// Legacy shorthands for disjointness and inclusion of buffers
///
let disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_disjoint (loc_buffer b1) (loc_buffer b2)
let includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_includes (loc_buffer b1) (loc_buffer b2) /\
(g_is_null b1 <==> g_is_null b2)
val disjoint_neq (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (disjoint b1 b2 /\ U32.v (len b1) > 0))
(ensures (~(b1 === b2)))
val empty_disjoint (#t1 #t2: Type) (#rrel1 #rel1: srel t1) (#rrel2 #rel2: srel t2) (b1: mbuffer t1 rrel1 rel1) (b2: mbuffer t2 rrel2 rel2) : Lemma
(requires (length b1 == 0))
(ensures (disjoint b1 b2))
(*
/// The liveness of a sub-buffer follows from the liveness
/// of its enclosing buffer.
val includes_live (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (live h larger ==> live h smaller))
[SMTPatOr [
[SMTPat (includes larger smaller); SMTPat (live h larger)];
[SMTPat (includes larger smaller); SMTPat (live h smaller)];
]]
*)
val includes_frameOf_as_addr (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (g_is_null larger == g_is_null smaller /\ frameOf larger == frameOf smaller /\ as_addr larger == as_addr smaller))
[SMTPat (larger `includes` smaller)]
///
/// Useful shorthands for pointers, or maybe-null pointers
inline_for_extraction
type mpointer (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{length b == 1}
inline_for_extraction
type mpointer_or_null (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{if g_is_null b then True else length b == 1}
unfold
let deref (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (x:mpointer a rrel rel) =
get h x 0
/// Two pointers having different contents are disjoint. This is valid
/// only for pointers, i.e. buffers of size 1.
val pointer_distinct_sel_disjoint
(#a:Type0) (#rrel1 #rrel2 #rel1 #rel2:srel a)
(b1:mpointer a rrel1 rel1)
(b2:mpointer a rrel2 rel2)
(h:HS.mem)
:Lemma (requires (live h b1 /\ live h b2 /\ get h b1 0 =!= get h b2 0))
(ensures (disjoint b1 b2))
/// The following stateful operations on buffers do not change the
/// memory, but, as required by the C standard, they all require the
/// buffer in question to be live.
/// The nullity test is ``is_null b``, which KaRaMeL compiles to C as ``b == NULL``.
val is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack bool (requires (fun h -> live h b))
(ensures (fun h y h' -> h == h' /\ y == g_is_null b))
/// ``sub b i len`` constructs the sub-buffer of ``b`` starting from
/// offset ``i`` with length ``len``. KaRaMeL extracts this operation as
/// ``b + i`` (or, equivalently, ``&b[i]``.)
val msub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t) (len:Ghost.erased U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i + U32.v (Ghost.reveal len) <= length b /\ compatible_sub b i (Ghost.reveal len) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (Ghost.reveal len)))
/// ``offset b i`` constructs the tail of the buffer ``b`` starting from
/// offset ``i``, i.e. the sub-buffer of ``b`` starting from offset ``i``
/// with length ``U32.sub (len b) i``. KaRaMeL compiles it as ``b + i`` or
/// ``&b[i]``.
///
/// This stateful operation cannot be derived from ``sub``, because the
/// length cannot be computed outside of proofs.
val moffset (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i <= length b /\ compatible_sub b i (U32.sub (len b) i) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (U32.sub (len b) i)))
// goffset
/// ``index b i`` reads the value of ``b`` at offset ``i`` from memory and
/// returns it. KaRaMeL compiles it as ``b[i]``.
val index (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (i:U32.t)
:HST.Stack a (requires (fun h -> live h b /\ U32.v i < length b))
(ensures (fun h y h' -> h == h' /\ y == Seq.index (as_seq h b) (U32.v i)))
/// The following stateful operations on buffers modify the memory,
/// and, as usual, require the liveness of the buffer.
/// ``g_upd_seq b s h`` updates the entire buffer `b`'s contents in
/// heap `h` to correspond to the sequence `s`
val g_upd_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (s:Seq.lseq a (length b))
(h:HS.mem{live h b})
:GTot HS.mem
val lemma_g_upd_with_same_seq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b)) (ensures (g_upd_seq b (as_seq h b) h == h))
/// A lemma specifying `g_upd_seq` in terms of its effect on the
/// buffer's underlying sequence
val g_upd_seq_as_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(s:Seq.lseq a (length b))
(h:HS.mem{live h b})
: Lemma (let h' = g_upd_seq b s h in
(Seq.length s > 0 ==> not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
HST.equal_domains h h' /\
as_seq h' b == s)
/// ``g_upd b i v h`` updates the buffer `b` in heap `h` at location
/// `i` writing ``v`` there. This is the spec analog of the stateful
/// update `upd` below.
let g_upd (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: GTot HS.mem
= g_upd_seq b (Seq.upd (as_seq h b) i v) h
val g_upd_modifies_strong (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: Lemma (modifies (loc_buffer_from_to b (U32.uint_to_t i) (U32.uint_to_t (i + 1))) h (g_upd b i v h))
/// ``upd b i v`` writes ``v`` to the memory, at offset ``i`` of
/// buffer ``b``. KaRaMeL compiles it as ``b[i] = v``.
val upd'
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
:HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> h' == g_upd b (U32.v i) v h))
inline_for_extraction
let upd
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
: HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> (not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
as_seq h' b == Seq.upd (as_seq h b) (U32.v i) v))
= let h = HST.get () in
upd' b i v;
g_upd_seq_as_seq b (Seq.upd (as_seq h b) (U32.v i) v) h
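(*
 * Example (illustrative sketch of hypothetical client code): a
 * read-modify-write through the stateful operations above; the trivial
 * preorder (fun _ _ -> True) is assumed so that the write is always permitted
 * by ``rel``.
 *
 *   let incr (b:mpointer U32.t (fun _ _ -> True) (fun _ _ -> True))
 *     : HST.Stack unit
 *       (requires (fun h -> live h b))
 *       (ensures  (fun h0 _ h1 -> modifies (loc_buffer b) h0 h1 /\ live h1 b))
 *     = let v = index b 0ul in
 *       upd b 0ul (v `U32.add_mod` 1ul)
 *)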
(* FIXME: Comment on `recall` *)
val recallable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val region_lifetime_buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Type0
(*
* A functoriality lemma
*)
unfold
let rrel_rel_always_compatible (#a:Type0) (rrel rel:srel a) =
forall (len:nat) (i:nat) (j:nat{i <= j /\ j <= len}). compatible_subseq_preorder len rrel i j rel
val region_lifetime_sub (#a:Type0) (#rrel #rel #subrel:srel a)
(b0:mbuffer a rrel rel)
(b1:mbuffer a rrel subrel)
: Lemma
(requires rrel_rel_always_compatible rrel subrel)
(ensures
(region_lifetime_buf b0 /\
(exists i len. U32.v i + U32.v len <= length b0 /\ b1 == mgsub subrel b0 i len)) ==> region_lifetime_buf b1)
val recallable_null (#a:Type0) (#rrel #rel:srel a)
:Lemma (recallable (mnull #a #rrel #rel)) [SMTPat (recallable (mnull #a #rrel #rel))]
(*
val recallable_includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (recallable larger <==> recallable smaller))
[SMTPatOr [
[SMTPat (recallable larger); SMTPat (recallable smaller);];
[SMTPat (larger `includes` smaller)];
]]
*)
val recallable_mgsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel /\ recallable b))
(ensures (recallable (mgsub sub_rel b i len)))
[SMTPatOr [
[SMTPat (recallable (mgsub sub_rel b i len))];
[SMTPat (recallable b); SMTPat (mgsub sub_rel b i len);]
]]
val recall (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack unit (requires (fun m -> recallable b \/ (region_lifetime_buf b /\ HS.live_region m (frameOf b))))
(ensures (fun m0 _ m1 -> m0 == m1 /\ live m1 b))
(*
* Begin: API for general witness and recall
* Clients can witness predicates on the contents of the buffer, and later recall them
* Provided the predicates are stable w.r.t. the buffer preorder
*)
(* Shorthand for predicates of Seq.seq a *)
unfold let spred (a:Type0) = Seq.seq a -> Type0
(*
 * Note the tight patterns on the quantifier; you may need to write additional
 * triggers if you are working with them directly
*)
unfold let stable_on (#a:Type0) (p:spred a) (rel:srel a) =
forall (s1 s2:Seq.seq a).{:pattern (p s1); (rel s1 s2); (p s2)} (p s1 /\ rel s1 s2) ==> p s2
(* Clients get this pure token when they witness a predicate *)
val witnessed (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a) :Type0
(*
* We can only support witness and recall for gc-malloced buffers (i.e. recallable ones)
* This is not a fundamental limitation, but needs some tweaks to the underlying state model
*)
val witness_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.ST unit (requires (fun h0 -> p (as_seq h0 b) /\ p `stable_on` rel))
(ensures (fun h0 _ h1 -> h0 == h1 /\ b `witnessed` p))
val recall_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.ST unit (requires (fun h0 -> (recallable b \/ live h0 b) /\ b `witnessed` p))
(ensures (fun h0 _ h1 -> h0 == h1 /\ live h0 b /\ p (as_seq h0 b)))
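(*
 * Example (illustrative sketch of hypothetical client code): with a constant
 * preorder (the contents never change), every predicate on the contents is
 * trivially stable, so it can be witnessed once and then recalled wherever
 * the buffer is used.
 *
 *   let rel_const (a:Type0) : srel a = fun s1 s2 -> s1 == s2
 *
 *   let remember (#a:Type0) (b:mbuffer a (rel_const a) (rel_const a)) (p:spred a)
 *     : HST.ST unit
 *       (requires (fun h0 -> p (as_seq h0 b)))
 *       (ensures  (fun h0 _ h1 -> h0 == h1 /\ b `witnessed` p))
 *     = witness_p b p
 *)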
val witnessed_functorial (#a:Type0)
(#rrel #rel1 #rel2:srel a)
(b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t)
(s1 s2:spred a)
: Lemma
(requires
rrel_rel_always_compatible rrel rel1 /\ //e.g. trivial_preorder, immutable preorder etc.
U32.v i + U32.v len <= length b1 /\
b2 == mgsub rel2 b1 i len /\ //the underlying allocation unit for b1 and b2 must be the same
witnessed b1 s1 /\
(forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2)))
(ensures witnessed b2 s2)
(*
* A stateful version that relaxes the rrel and rel compatibility
* but requires liveness of b1
*)
val witnessed_functorial_st (#a:Type0)
(#rrel #rel1 #rel2:srel a)
(b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t)
(s1 s2:spred a)
: HST.Stack unit
(requires fun h ->
live h b1 /\
U32.v i + U32.v len <= length b1 /\
b2 == mgsub rel2 b1 i len /\
witnessed b1 s1 /\
(forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2)))
(ensures fun h0 _ h1 -> h0 == h1 /\ witnessed b2 s2)
(* End: API for general witness and recall *)
/// Deallocation. A buffer that was allocated by ``malloc`` (see below)
/// can be freed with ``free``.
val freeable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val free (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.ST unit (requires (fun h0 -> live h0 b /\ freeable b))
(ensures (fun h0 _ h1 -> (not (g_is_null b)) /\
Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\
(HS.get_tip h1) == (HS.get_tip h0) /\
modifies (loc_addr_of_buffer b) h0 h1 /\
HS.live_region h1 (frameOf b)))
val freeable_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (freeable b)) (ensures (length b > 0))
[SMTPat (freeable b)]
val freeable_disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (frameOf b1 <> frameOf b2 \/ as_addr b1 <> as_addr b2))
let freeable_disjoint' (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (loc_disjoint (loc_addr_of_buffer b1) (loc_addr_of_buffer b2)))
[SMTPat (freeable b1); SMTPat (disjoint b1 b2)]
= freeable_disjoint b1 b2
(***** Begin allocation functions *****)
/// Allocation. This is the common postcondition of all allocation
/// operators, which tells that the resulting buffer is fresh, and
/// specifies its initial contents.
(*
* Allocation functions:
* In the return type, we try to give heap-independent postconditions (such as length)
* in the refinement of the buffer type (for the usage pattern of top-level buffers)
* while heap dependent postconditions are provided in the ensures clause
*
* One unsatisfying aspect is that these functions are duplicated in the wrappers that we write
* (e.g. Buffer, ImmutableBuffer, etc.)
* If we don't duplicate, then the clients may face type inference issues (for preorders)
*
 * So, if you change any of the pre- or postconditions, you should change the pre and post spec functions
* (such as alloc_post_mem_common etc.), rather than the specs directly
* Perhaps we can rely on F* type inference and not write specs explicitly in those wrappers?
* Will try that
*
* For memory dependent post, alloc_post_mem_common is the one used by everyone
*
* For heap allocations, the library also provides partial functions that could return null
* Clients need to explicitly check for non-null values when using these functions
* Partial function specs use alloc_partial_post_mem_common
*
* NOTE: a useful test for the implementation of partial functions is that
* their spec should be valid even when their implementation just returns null
*)
unfold let lmbuffer (a:Type0) (rrel rel:srel a) (len:nat)
= b:mbuffer a rrel rel{length b == len /\ not (g_is_null b)}
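(*
 * Example (illustrative sketch, not an actual declaration of this interface):
 * the shape of a heap-allocation function in the wrappers, stated with
 * lmbuffer and alloc_post_mem_common (defined just below); the trivial
 * preorder is assumed for both indices.
 *
 *   val example_malloc (#a:Type0) (r:HS.rid) (init:a) (len:U32.t{U32.v len > 0})
 *     : HST.ST (b:lmbuffer a (fun _ _ -> True) (fun _ _ -> True) (U32.v len){frameOf b == r /\ freeable b})
 *       (requires (fun _ -> HST.is_eternal_region r))
 *       (ensures  (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
 *)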
unfold
let alloc_post_mem_common (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h0 h1:HS.mem) (s:Seq.seq a)
= live h1 b /\
unused_in b h0 /\
Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\
(HS.get_tip h1) == (HS.get_tip h0) /\
modifies loc_none h0 h1 /\
as_seq h1 b == s | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
a: Type0 ->
rrel: LowStar.Monotonic.Buffer.srel a ->
rel: LowStar.Monotonic.Buffer.srel a ->
len: Prims.nat ->
r: FStar.Monotonic.HyperHeap.rid
-> Type0 | Prims.Tot | [
"total"
] | [] | [
"LowStar.Monotonic.Buffer.srel",
"Prims.nat",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.mbuffer",
"Prims.l_imp",
"Prims.b2t",
"Prims.op_Negation",
"LowStar.Monotonic.Buffer.g_is_null",
"Prims.l_and",
"Prims.eq2",
"LowStar.Monotonic.Buffer.length",
"LowStar.Monotonic.Buffer.frameOf"
] | [] | false | false | false | true | true | let lmbuffer_or_null (a: Type0) (rrel rel: srel a) (len: nat) (r: HS.rid) =
| b: mbuffer a rrel rel {(not (g_is_null b)) ==> (length b == len /\ frameOf b == r)} | false |
|
LowStar.Monotonic.Buffer.fsti | LowStar.Monotonic.Buffer.stable_on | val stable_on : p: LowStar.Monotonic.Buffer.spred a -> rel: LowStar.Monotonic.Buffer.srel a -> Prims.logical | let stable_on (#a:Type0) (p:spred a) (rel:srel a) =
forall (s1 s2:Seq.seq a).{:pattern (p s1); (rel s1 s2); (p s2)} (p s1 /\ rel s1 s2) ==> p s2 | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 94,
"end_line": 1975,
"start_col": 7,
"start_line": 1974
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
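(* Illustrative sketch (editor's addition, not part of this interface): `get`
   is meant for specifications. For instance, a hypothetical in-place increment
   of cell `i` of a buffer of machine integers could be specified as follows
   (the name `incr_spec` is made up for illustration):

   let incr_spec (#rrel #rel:srel U32.t) (b:mbuffer U32.t rrel rel)
                 (i:U32.t) (h0 h1:HS.mem) : GTot Type0 =
     U32.v i < length b /\ live h0 b /\ live h1 b /\
     get h1 b (U32.v i) == U32.add_mod (get h0 b (U32.v i)) 1ul
*)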
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers.
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
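(* Illustrative sketch (editor's addition): with the trivial preorder that
   relates any two sequences, every sub-buffer choice is compatible. The name
   `trivial_rel` is local to this example (LowStar.Buffer exposes a similar
   `trivial_preorder`):

   let trivial_rel (a:Type0) : srel a = fun _ _ -> True

   (* for any `b`, `i` and `len` with U32.v i + U32.v len <= length b,
      `compatible_sub b i len (trivial_rel a)` holds vacuously *)
*)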
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further the clients can attach a preorder with the subbuffer (sub_rel),
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers having the same region and address have
/// elements of the same type (and the same initial preorder).
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
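(* Illustrative sketch (editor's addition): thanks to the SMT patterns above,
   unions of locations normalize automatically in proofs. The lemma below is
   only an example of what clients can expect to get for free:

   let loc_union_example (l1 l2: loc)
   : Lemma (loc_union (loc_union l1 loc_none) l2 == loc_union l1 l2)
   = ()
*)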
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
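(* Illustrative sketch (editor's addition): `loc_addr_of_buffer` is the right
   footprint for operations that affect the whole allocation unit rather than
   just its contents. A hypothetical deallocation routine (using `modifies`,
   which is introduced later in this file) could be specified as:

   val free_sketch (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
   : HST.ST unit
     (requires fun h0 -> live h0 b)
     (ensures  fun h0 _ h1 -> modifies (loc_addr_of_buffer b) h0 h1)
*)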
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then the set of memory
/// locations of ``b1`` also includes that of ``b2``.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then so are their corresponding sets of memory
/// locations.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then so are their corresponding memory locations
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
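(* Illustrative sketch (editor's addition): the lemma above makes the two
   halves of a buffer automatically disjoint; `split_disjoint` is a made-up
   name for this example:

   let split_disjoint (#a:Type0) (#rrel #rel:srel a)
     (b:mbuffer a rrel rel) (i:U32.t{U32.v i <= length b})
   : Lemma (loc_disjoint (loc_buffer (mgsub rel b 0ul i))
                         (loc_buffer (mgsub rel b i (len b `U32.sub` i))))
   = ()
*)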
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
 * Same as all_disjoint, retained for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
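(* Illustrative sketch (editor's addition): a hypothetical operation over three
   pairwise-disjoint, live buffers could phrase its precondition with the
   helpers above (the name `xor3` and its behaviour are made up):

   val xor3 (#a:Type0) (#rrel #rel:srel a) (dst src1 src2:mbuffer a rrel rel)
   : HST.Stack unit
     (requires fun h ->
       all_live h [buf dst; buf src1; buf src2] /\
       loc_pairwise_disjoint [loc_buffer dst; loc_buffer src1; loc_buffer src2])
     (ensures fun _ _ _ -> True)
*)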
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if,
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
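(* Illustrative sketch (editor's addition): the typical framing step. If some
   operation advertises `modifies (loc_buffer b1) h0 h1` and `b2` is disjoint
   from `b1`, the elimination lemma above (via its SMT patterns) yields the
   liveness and contents of `b2` for free:

   let frame_example (#a:Type0) (#rrel #rel:srel a)
     (b1 b2:mbuffer a rrel rel) (h0 h1:HS.mem)
   : Lemma
     (requires
       live h0 b1 /\ live h0 b2 /\
       loc_disjoint (loc_buffer b1) (loc_buffer b2) /\
       modifies (loc_buffer b1) h0 h1)
     (ensures live h1 b2 /\ as_seq h0 b2 == as_seq h1 b2)
   = ()
*)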
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
/// If the memory state does not change, then any set of memory locations is
/// vacuously modified (and, in particular, so is the empty set ``loc_none``).
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x
/// Modifies clauses are transitive. This lemma is the most general
/// one.
val modifies_trans
(s12: loc)
(h1 h2: HS.mem)
(s23: loc)
(h3: HS.mem)
: Lemma
(requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
(ensures (modifies (loc_union s12 s23) h1 h3))
let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
= modifies_trans l h1 h2 l_goal h3
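(* Illustrative sketch (editor's addition): two successive steps that each
   modify the same location set compose into a single modifies clause, by
   transitivity and idempotence of union:

   let modifies_trans_same (l:loc) (h0 h1 h2:HS.mem)
   : Lemma (requires (modifies l h0 h1 /\ modifies l h1 h2))
           (ensures  (modifies l h0 h2))
   = modifies_trans l h0 h1 l h2
*)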
/// Regions that are not live can be removed from sets of memory
/// locations that are modified.
val modifies_only_live_regions
(rs: Set.set HS.rid)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_regions false rs) l) h h' /\
(forall r . Set.mem r rs ==> (~ (HS.live_region h r)))
))
(ensures (modifies l h h'))
/// As a consequence, fresh regions can be removed from modifies
/// clauses.
val no_upd_fresh_region: r:HS.rid -> l:loc -> h0:HS.mem -> h1:HS.mem -> Lemma
(requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1))
(ensures (modifies l h0 h1))
[SMTPat (HS.fresh_region r h0 h1); SMTPat (modifies l h0 h1)]
val new_region_modifies (m0: HS.mem) (r0: HS.rid) (col: option int) : Lemma
(requires (HST.is_eternal_region r0 /\ HS.live_region m0 r0 /\ (None? col \/ HS.is_heap_color (Some?.v col))))
(ensures (
let (_, m1) = HS.new_eternal_region m0 r0 col in
modifies loc_none m0 m1
))
[SMTPat (HS.new_eternal_region m0 r0 col)]
/// Stack discipline: any stack frame (and all its transitively
/// extending regions) that is pushed, modified and popped can be
/// removed from a modifies clause.
/// AR: 01/29/2019: Removing the smt pattern from this lemma.
/// Clients are no longer expected to call it explicitly;
/// if you have to, please raise an issue.
val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
))
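(* Illustrative sketch (editor's addition): this lemma is what justifies the
   usual push_frame/pop_frame shape of Stack functions. A hypothetical function
   that only uses stack-allocated scratch space can advertise a modifies clause
   that never mentions the temporary frame:

   let with_scratch (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
   : HST.Stack unit
     (requires fun h -> live h b)
     (ensures  fun h0 _ h1 -> modifies (loc_buffer b) h0 h1)
   = HST.push_frame ();
     (* ...allocate scratch buffers in the fresh frame and update b here... *)
     HST.pop_frame ()
*)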
/// Compatibility lemmas to rescue modifies clauses specified in the
/// standard F* HyperStack library.
val modifies_loc_regions_intro
(rs: Set.set HS.rid)
(h1 h2: HS.mem)
: Lemma
(requires (HS.modifies rs h1 h2))
(ensures (modifies (loc_regions true rs) h1 h2))
val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2))
/// Modifies clauses for allocating a reference: nothing is
/// modified. (In particular, a modifies clause does not track
/// memory locations that are created.)
val modifies_ralloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(i: HS.rid)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel)
(h' : HS.mem)
: Lemma
(requires (HST.ralloc_post i init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.ralloc_post i init h x h')]
val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')]
/// Modifies clause for freeing a reference: the address is modified.
val modifies_free
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel { HS.is_mm r } )
(m: HS.mem { m `HS.contains` r } )
: Lemma
(modifies (loc_freed_mreference r) m (HS.free r m))
[SMTPat (HS.free r m)]
/// Another compatibility lemma
val modifies_none_modifies
(h1 h2: HS.mem)
: Lemma
(requires (HST.modifies_none h1 h2))
(ensures (modifies loc_none h1 h2))
[SMTPat (HST.modifies_none h1 h2)]
/// Compatibility with HS.upd
val modifies_upd
(#t: Type) (#pre: Preorder.preorder t)
(r: HS.mreference t pre)
(v: t)
(h: HS.mem)
: Lemma
(requires (HS.contains h r))
(ensures (modifies (loc_mreference r) h (HS.upd h r v)))
[SMTPat (HS.upd h r v)]
/// Introduction lemma for modifying loc_buffer_from_to
val modifies_loc_buffer_from_to_intro
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
/// A memory ``h`` does not contain address ``a`` in region ``r``, denoted
/// ``does_not_contain_addr h (r, a)``, only if, either region ``r`` is
/// not live, or address ``a`` is unused in region ``r``.
(* BEGIN TODO: move to FStar.Monotonic.HyperStack *)
val does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: GTot Type0
val not_live_region_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (~ (HS.live_region h (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val unused_in_does_not_contain_addr
(h: HS.mem)
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r)))
val addr_unused_in_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val free_does_not_contain_addr
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
HS.is_mm r /\
m `HS.contains` r /\
fst x == HS.frameOf r /\
snd x == HS.as_addr r
))
(ensures (
HS.free r m `does_not_contain_addr` x
))
[SMTPat (HS.free r m `does_not_contain_addr` x)]
val does_not_contain_addr_elim
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
m `does_not_contain_addr` x /\
HS.frameOf r == fst x /\
HS.as_addr r == snd x
))
(ensures (~ (m `HS.contains` r)))
(** END TODO *)
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_live_addresses
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_addresses false r a) l) h h' /\
(forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x))
))
(ensures (modifies l h h'))
(* Generic way to ensure that a buffer just allocated is disjoint from
any other object, however the latter's liveness is defined. *)
val loc_not_unused_in (h: HS.mem) : GTot loc
val loc_unused_in (h: HS.mem) : GTot loc
(* Shortcut notations with more handy names *)
let loc_in (l: loc) (h: HS.mem) =
loc_not_unused_in h `loc_includes` l
let loc_not_in (l: loc) (h: HS.mem) =
loc_unused_in h `loc_includes` l
val loc_regions_unused_in (h: HS.mem) (rs: Set.set HS.rid) : Lemma
(requires (forall r . Set.mem r rs ==> (~ (HS.live_region h r))))
(ensures (loc_unused_in h `loc_includes` loc_regions false rs))
val loc_unused_in_not_unused_in_disjoint (h: HS.mem) : Lemma
(loc_disjoint (loc_unused_in h) (loc_not_unused_in h))
val not_live_region_loc_not_unused_in_disjoint
(h0: HS.mem)
(r: HS.rid)
: Lemma
(requires (~ (HS.live_region h0 r)))
(ensures (loc_disjoint (loc_region_only false r) (loc_not_unused_in h0)))
let fresh_frame_loc_not_unused_in_disjoint
(h0 h1: HS.mem)
: Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)]
= not_live_region_loc_not_unused_in_disjoint h0 (HS.get_tip h1)
val live_loc_not_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b))
(ensures (loc_not_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (live h b)]
val unused_in_loc_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (unused_in b h))
(ensures (loc_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (unused_in b h)]
val modifies_address_liveness_insensitive_unused_in
(h h' : HS.mem)
: Lemma
(requires (modifies (address_liveness_insensitive_locs) h h'))
(ensures (loc_not_unused_in h' `loc_includes` loc_not_unused_in h /\ loc_unused_in h `loc_includes` loc_unused_in h'))
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_not_unused_in
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (modifies (loc_union (loc_unused_in h) l) h h'))
(ensures (modifies l h h'))
val mreference_live_loc_not_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (h `HS.contains` r))
(ensures (loc_not_unused_in h `loc_includes` loc_freed_mreference r /\ loc_not_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.contains h r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_freed_mreference r)];
]]
val mreference_unused_in_loc_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (loc_unused_in h `loc_includes` loc_freed_mreference r /\ loc_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.unused_in r h)];
[SMTPat (loc_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_unused_in h `loc_includes` loc_freed_mreference r)];
]]
let unused_in_not_unused_in_disjoint_2
(l1 l2 l1' l2': loc)
(h: HS.mem)
: Lemma
(requires (loc_unused_in h `loc_includes` l1 /\ loc_not_unused_in h `loc_includes` l2 /\ l1 `loc_includes` l1' /\ l2 `loc_includes` l2' ))
(ensures (loc_disjoint l1' l2' ))
[SMTPat (loc_disjoint l1' l2'); SMTPat (loc_unused_in h `loc_includes` l1); SMTPat (loc_not_unused_in h `loc_includes` l2)]
= loc_includes_trans (loc_unused_in h) l1 l1' ;
loc_includes_trans (loc_not_unused_in h) l2 l2' ;
loc_unused_in_not_unused_in_disjoint h ;
loc_disjoint_includes (loc_unused_in h) (loc_not_unused_in h) l1' l2'
val modifies_loc_unused_in
(l: loc)
(h1 h2: HS.mem)
(l' : loc)
: Lemma
(requires (
modifies l h1 h2 /\
address_liveness_insensitive_locs `loc_includes` l /\
loc_unused_in h2 `loc_includes` l'
))
(ensures (loc_unused_in h1 `loc_includes` l'))
[SMTPatOr [
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h2 `loc_includes` l')];
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h1 `loc_includes` l')];
]]
/// Shorthand: freshness
let fresh_loc (l: loc) (h h' : HS.mem) : GTot Type0 =
loc_unused_in h `loc_includes` l /\
loc_not_unused_in h' `loc_includes` l
let ralloc_post_fresh_loc (#a:Type) (#rel:Preorder.preorder a) (i: HS.rid) (init:a) (m0: HS.mem)
(x: HST.mreference a rel{HST.is_eternal_region (HS.frameOf x)}) (m1: HS.mem) : Lemma
(requires (HST.ralloc_post i init m0 x m1))
(ensures (fresh_loc (loc_freed_mreference x) m0 m1))
[SMTPat (HST.ralloc_post i init m0 x m1)]
= ()
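(* Illustrative sketch (editor's addition): allocation routines in derived
   modules typically advertise freshness in this style; combined with
   `unused_in_not_unused_in_disjoint_2` above, clients can conclude that a
   newly allocated object is disjoint from anything already allocated. A
   hypothetical allocator signature:

   val alloc_sketch (#a:Type0) (#rrel:srel a) (r:HS.rid) (init:a) (len:U32.t)
   : HST.ST (mbuffer a rrel rrel)
     (requires fun h0 -> HST.is_eternal_region r /\ U32.v len > 0)
     (ensures  fun h0 b h1 ->
       live h1 b /\ length b == U32.v len /\
       fresh_loc (loc_buffer b) h0 h1 /\
       modifies loc_none h0 h1)
*)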
//AR: this is needed for liveness across fresh_frame
val fresh_frame_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (modifies loc_none h0 h1))
[SMTPat (HS.fresh_frame h0 h1)]
val popped_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.popped h0 h1))
(ensures (modifies (loc_region_only false (HS.get_tip h0)) h0 h1))
[SMTPat (HS.popped h0 h1)]
val modifies_remove_new_locs (l_fresh l_aux l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (fresh_loc l_fresh h1 h2 /\
modifies l_aux h1 h2 /\
l_goal `loc_includes` l_aux /\
modifies (loc_union l_fresh l_goal) h2 h3))
(ensures (modifies l_goal h1 h3))
[SMTPat (fresh_loc l_fresh h1 h2);
SMTPat (modifies l_aux h1 h2);
SMTPat (modifies l_goal h1 h3)]
(*
* AR: this lemma is framing the modifies clause across a fresh frame
* one way to do it would have been to reuse the lemma modifies_remove_new_locs,
* treating the fresh frame as another new location
 * however, the way the library is set up, loc_region in any form cannot be considered
* a fresh loc
* so, we have a special lemma for fresh_frame
*)
let modifies_remove_fresh_frame (h1 h2 h3:HS.mem) (l:loc)
: Lemma (requires (HS.fresh_frame h1 h2 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h2)) l) h2 h3))
(ensures (modifies l h1 h3))
[SMTPat (modifies l h1 h3); SMTPat (HS.fresh_frame h1 h2)]
= loc_regions_unused_in h1 (HS.mod_set (Set.singleton (HS.get_tip h2)));
modifies_only_not_unused_in l h1 h3
/// Legacy shorthands for disjointness and inclusion of buffers
///
let disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_disjoint (loc_buffer b1) (loc_buffer b2)
let includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_includes (loc_buffer b1) (loc_buffer b2) /\
(g_is_null b1 <==> g_is_null b2)
val disjoint_neq (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (disjoint b1 b2 /\ U32.v (len b1) > 0))
(ensures (~(b1 === b2)))
val empty_disjoint (#t1 #t2: Type) (#rrel1 #rel1: srel t1) (#rrel2 #rel2: srel t2) (b1: mbuffer t1 rrel1 rel1) (b2: mbuffer t2 rrel2 rel2) : Lemma
(requires (length b1 == 0))
(ensures (disjoint b1 b2))
(*
/// The liveness of a sub-buffer follows from the liveness
/// of its enclosing buffer.
val includes_live (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (live h larger ==> live h smaller))
[SMTPatOr [
[SMTPat (includes larger smaller); SMTPat (live h larger)];
[SMTPat (includes larger smaller); SMTPat (live h smaller)];
]]
*)
val includes_frameOf_as_addr (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (g_is_null larger == g_is_null smaller /\ frameOf larger == frameOf smaller /\ as_addr larger == as_addr smaller))
[SMTPat (larger `includes` smaller)]
///
/// Useful shorthands for pointers, or maybe-null pointers
inline_for_extraction
type mpointer (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{length b == 1}
inline_for_extraction
type mpointer_or_null (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{if g_is_null b then True else length b == 1}
unfold
let deref (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (x:mpointer a rrel rel) =
get h x 0
/// Two pointers having different contents are disjoint. This is valid
/// only for pointers, i.e. buffers of size 1.
val pointer_distinct_sel_disjoint
(#a:Type0) (#rrel1 #rrel2 #rel1 #rel2:srel a)
(b1:mpointer a rrel1 rel1)
(b2:mpointer a rrel2 rel2)
(h:HS.mem)
:Lemma (requires (live h b1 /\ live h b2 /\ get h b1 0 =!= get h b2 0))
(ensures (disjoint b1 b2))
/// The following stateful operations on buffers do not change the
/// memory, but, as required by the C standard, they all require the
/// buffer in question to be live.
/// The nullity test, ``is_null b``, which KaRaMeL compiles to C as ``b == NULL``.
val is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack bool (requires (fun h -> live h b))
(ensures (fun h y h' -> h == h' /\ y == g_is_null b))
/// ``sub b i len`` constructs the sub-buffer of ``b`` starting from
/// offset ``i`` with length ``len``. KaRaMeL extracts this operation as
/// ``b + i`` (or, equivalently, ``&b[i]``.)
val msub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t) (len:Ghost.erased U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i + U32.v (Ghost.reveal len) <= length b /\ compatible_sub b i (Ghost.reveal len) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (Ghost.reveal len)))
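(* An illustrative sketch, not part of the verified interface: carving out the
   first `n` elements of `b` at the same preorder `rel`. The helper name
   `prefix` is ours; `compatible_sub` is simply assumed in the precondition,
   exactly as `msub` itself demands.

let prefix (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel) (n:U32.t{U32.v n <= length b})
  :HST.Stack (mbuffer a rrel rel)
   (requires (fun h -> compatible_sub b 0ul n rel /\ live h b))
   (ensures (fun h b' h' -> h == h' /\ b' == mgsub rel b 0ul n))
  = msub rel b 0ul (Ghost.hide n)
*)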
/// ``offset b i`` constructs the tail of the buffer ``b`` starting from
/// offset ``i``, i.e. the sub-buffer of ``b`` starting from offset ``i``
/// with length ``U32.sub (len b) i``. KaRaMeL compiles it as ``b + i`` or
/// ``&b[i]``.
///
/// This stateful operation cannot be derived from ``sub``, because the
/// length cannot be computed outside of proofs.
val moffset (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i <= length b /\ compatible_sub b i (U32.sub (len b) i) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (U32.sub (len b) i)))
// goffset
/// ``index b i`` reads the value of ``b`` at offset ``i`` from memory and
/// returns it. KaRaMeL compiles it as b[i].
val index (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (i:U32.t)
:HST.Stack a (requires (fun h -> live h b /\ U32.v i < length b))
(ensures (fun h y h' -> h == h' /\ y == Seq.index (as_seq h b) (U32.v i)))
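(* An illustrative sketch, not part of the verified interface: reading the
   first element of a non-empty buffer. The helper name `read_head` is ours.

let read_head (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel{length b > 0})
  :HST.Stack a (requires (fun h -> live h b))
               (ensures (fun h v h' -> h == h' /\ v == get h b 0))
  = index b 0ul
*)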
/// The following stateful operations on buffers modify the memory,
/// and, as usual, require the liveness of the buffer.
/// ``g_upd_seq b s h`` updates the entire buffer `b`'s contents in
/// heap `h` to correspond to the sequence `s`
val g_upd_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (s:Seq.lseq a (length b))
(h:HS.mem{live h b})
:GTot HS.mem
val lemma_g_upd_with_same_seq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b)) (ensures (g_upd_seq b (as_seq h b) h == h))
/// A lemma specifying `g_upd_seq` in terms of its effect on the
/// buffer's underlying sequence
val g_upd_seq_as_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(s:Seq.lseq a (length b))
(h:HS.mem{live h b})
: Lemma (let h' = g_upd_seq b s h in
(Seq.length s > 0 ==> not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
HST.equal_domains h h' /\
as_seq h' b == s)
/// ``g_upd b i v h`` updates the buffer `b` in heap `h` at location
/// `i` writing ``v`` there. This is the spec analog of the stateful
/// update `upd` below.
let g_upd (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: GTot HS.mem
= g_upd_seq b (Seq.upd (as_seq h b) i v) h
val g_upd_modifies_strong (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: Lemma (modifies (loc_buffer_from_to b (U32.uint_to_t i) (U32.uint_to_t (i + 1))) h (g_upd b i v h))
/// ``upd b i v`` writes ``v`` to the memory, at offset ``i`` of
/// buffer ``b``. KaRaMeL compiles it as ``b[i] = v``.
val upd'
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
:HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> h' == g_upd b (U32.v i) v h))
inline_for_extraction
let upd
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
: HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> (not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
as_seq h' b == Seq.upd (as_seq h b) (U32.v i) v))
= let h = HST.get () in
upd' b i v;
g_upd_seq_as_seq b (Seq.upd (as_seq h b) (U32.v i) v) h
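(* An illustrative sketch, not part of the verified interface: overwriting the
   first cell of a non-empty buffer. The helper name `set_head` is ours; as
   with `upd`, the caller must know that the write is allowed by `rel`.

let set_head (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel{length b > 0}) (v:a)
  :HST.Stack unit
   (requires (fun h -> live h b /\ rel (as_seq h b) (Seq.upd (as_seq h b) 0 v)))
   (ensures (fun h _ h' -> modifies (loc_buffer b) h h' /\ live h' b /\
                           as_seq h' b == Seq.upd (as_seq h b) 0 v))
  = upd b 0ul v
*)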
(* FIXME: Comment on `recall` *)
val recallable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val region_lifetime_buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Type0
(*
* A functoriality lemma
*)
unfold
let rrel_rel_always_compatible (#a:Type0) (rrel rel:srel a) =
forall (len:nat) (i:nat) (j:nat{i <= j /\ j <= len}). compatible_subseq_preorder len rrel i j rel
val region_lifetime_sub (#a:Type0) (#rrel #rel #subrel:srel a)
(b0:mbuffer a rrel rel)
(b1:mbuffer a rrel subrel)
: Lemma
(requires rrel_rel_always_compatible rrel subrel)
(ensures
(region_lifetime_buf b0 /\
(exists i len. U32.v i + U32.v len <= length b0 /\ b1 == mgsub subrel b0 i len)) ==> region_lifetime_buf b1)
val recallable_null (#a:Type0) (#rrel #rel:srel a)
:Lemma (recallable (mnull #a #rrel #rel)) [SMTPat (recallable (mnull #a #rrel #rel))]
(*
val recallable_includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (recallable larger <==> recallable smaller))
[SMTPatOr [
[SMTPat (recallable larger); SMTPat (recallable smaller);];
[SMTPat (larger `includes` smaller)];
]]
*)
val recallable_mgsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel /\ recallable b))
(ensures (recallable (mgsub sub_rel b i len)))
[SMTPatOr [
[SMTPat (recallable (mgsub sub_rel b i len))];
[SMTPat (recallable b); SMTPat (mgsub sub_rel b i len);]
]]
val recall (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack unit (requires (fun m -> recallable b \/ (region_lifetime_buf b /\ HS.live_region m (frameOf b))))
(ensures (fun m0 _ m1 -> m0 == m1 /\ live m1 b))
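(* An illustrative sketch, not part of the verified interface: for a recallable
   buffer, liveness can be re-established on demand before reading. The helper
   name `read_recallable` is ours.

let read_recallable (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel{recallable b /\ length b > 0})
  :HST.Stack a (requires (fun _ -> True))
               (ensures (fun h v h' -> h == h' /\ v == get h' b 0))
  = recall b;
    index b 0ul
*)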
(*
* Begin: API for general witness and recall
 * Clients can witness predicates on the contents of the buffer, and later recall them,
 * provided the predicates are stable w.r.t. the buffer preorder
*)
(* Shorthand for predicates of Seq.seq a *)
unfold let spred (a:Type0) = Seq.seq a -> Type0
(*
 * Note the tight patterns on the quantifier; you may need to write additional triggers
* if you are directly working with them | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | p: LowStar.Monotonic.Buffer.spred a -> rel: LowStar.Monotonic.Buffer.srel a -> Prims.logical | Prims.Tot | [
"total"
] | [] | [
"LowStar.Monotonic.Buffer.spred",
"LowStar.Monotonic.Buffer.srel",
"Prims.l_Forall",
"FStar.Seq.Base.seq",
"Prims.l_imp",
"Prims.l_and",
"Prims.logical"
] | [] | false | false | false | true | true | let stable_on (#a: Type0) (p: spred a) (rel: srel a) =
| forall (s1: Seq.seq a) (s2: Seq.seq a). {:pattern (p s1); (rel s1 s2); (p s2)}
(p s1 /\ rel s1 s2) ==> p s2 | false |
|
LowStar.Monotonic.Buffer.fsti | LowStar.Monotonic.Buffer.alloca_pre | val alloca_pre : len: FStar.UInt32.t -> Prims.bool | let alloca_pre (len:U32.t) = U32.v len > 0 | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 49,
"end_line": 2204,
"start_col": 7,
"start_line": 2204
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
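(* A quick illustration, not used below: the trivial preorder that relates any
   two sequences is compatible with itself on every slice. Both names are ours.

let trivial_srel (a:Type0) :srel a = fun _ _ -> True

let trivial_srel_compatible (a:Type0) (len:nat) (i:nat) (j:nat{i <= j /\ j <= len})
  :Lemma (compatible_subseq_preorder len (trivial_srel a) i j (trivial_srel a))
  = ()
*)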
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` have the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Furthermore, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
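(* An illustrative sketch, not part of the verified interface: nested carving
   collapses. Elements [2, 5) of the sub-buffer covering [1, 9) of ``b`` are
   exactly elements [3, 6) of ``b``. The helper name `nested_slice` is ours.

let nested_slice (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
  :Lemma (requires (length b >= 10))
         (ensures (mgsub rel (mgsub rel b 1ul 8ul) 2ul 3ul == mgsub rel b 3ul 3ul))
  = ()
*)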
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers having the same region and address have
/// their elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
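(* A small sketch of the algebraic laws in action (the name is ours): a
   duplicated operand in a union of locations can be collapsed.

let loc_union_dedup (l1 l2: loc)
  : Lemma (loc_union (loc_union l1 l2) (loc_union l2 l1) == loc_union l1 l2)
  = loc_union_comm l2 l1;
    loc_union_idem (loc_union l1 l2)
*)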
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
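(* A small sketch (the name is ours): a union includes each of its operands,
   by reflexivity and `loc_includes_union_l`.

let loc_includes_union_fst (l1 l2: loc)
  : Lemma (loc_includes (loc_union l1 l2) l1)
  = loc_includes_refl l1;
    loc_includes_union_l l1 l2 l1
*)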
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then their
/// corresponding sets of memory locations are related by inclusion as well.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then their corresponding sets of memory
/// locations are related by inclusion as well.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then their corresponding sets of memory locations are related by inclusion as well
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
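(* An illustrative sketch (the name is ours): splitting a buffer at `mid`
   yields two sub-buffers with disjoint footprints; the pattern on
   `loc_disjoint_gsub_buffer` discharges the proof.

let halves_disjoint (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel) (mid:U32.t)
  :Lemma (requires (U32.v mid <= length b))
         (ensures (loc_disjoint (loc_buffer (mgsub rel b 0ul mid))
                                (loc_buffer (mgsub rel b mid (U32.sub (len b) mid)))))
  = ()
*)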
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
* Same as all_disjoint, retaining for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if,
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
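(* An illustrative sketch (the name is ours): the elimination lemma above is
   the framing principle in action: a buffer disjoint from the footprint of a
   step keeps its liveness and contents across that step.

let frame_buffer (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel) (l:loc) (h0 h1:HS.mem)
  :Lemma (requires (live h0 b /\ modifies l h0 h1 /\ loc_disjoint (loc_buffer b) l))
         (ensures (live h1 b /\ as_seq h0 b == as_seq h1 b))
  = modifies_buffer_elim b l h0 h1
*)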
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
/// If the memory state does not change, then any set of memory locations
/// is trivially modified (in particular, the empty set, ``loc_none``.)
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x
/// Modifies clauses are transitive. This lemma is the most general
/// one.
val modifies_trans
(s12: loc)
(h1 h2: HS.mem)
(s23: loc)
(h3: HS.mem)
: Lemma
(requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
(ensures (modifies (loc_union s12 s23) h1 h3))
let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
= modifies_trans l h1 h2 l_goal h3
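(*
/// Illustrative sketch, hypothetical client code (not part of this interface):
/// two successive writes to two buffers compose, by transitivity, into a single
/// modifies clause over the union of their locations. The preorder ``rel`` is
/// assumed to allow arbitrary updates, stated here as an explicit precondition;
/// an explicit call to ``modifies_trans`` may be needed if the patterns above
/// do not fire.
let two_writes (#rrel #rel:srel UInt32.t) (b1 b2:mbuffer UInt32.t rrel rel)
  :HST.Stack unit
   (requires (fun h -> live h b1 /\ live h b2 /\ length b1 > 0 /\ length b2 > 0 /\
                       (forall (s1 s2:Seq.seq UInt32.t). rel s1 s2)))
   (ensures (fun h0 _ h1 -> modifies (loc_union (loc_buffer b1) (loc_buffer b2)) h0 h1))
  = upd b1 0ul 0ul;
    upd b2 0ul 0ul
*)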
/// Regions that are not live can be removed from sets of memory
/// locations that are modified.
val modifies_only_live_regions
(rs: Set.set HS.rid)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_regions false rs) l) h h' /\
(forall r . Set.mem r rs ==> (~ (HS.live_region h r)))
))
(ensures (modifies l h h'))
/// As a consequence, fresh regions can be removed from modifies
/// clauses.
val no_upd_fresh_region: r:HS.rid -> l:loc -> h0:HS.mem -> h1:HS.mem -> Lemma
(requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1))
(ensures (modifies l h0 h1))
[SMTPat (HS.fresh_region r h0 h1); SMTPat (modifies l h0 h1)]
val new_region_modifies (m0: HS.mem) (r0: HS.rid) (col: option int) : Lemma
(requires (HST.is_eternal_region r0 /\ HS.live_region m0 r0 /\ (None? col \/ HS.is_heap_color (Some?.v col))))
(ensures (
let (_, m1) = HS.new_eternal_region m0 r0 col in
modifies loc_none m0 m1
))
[SMTPat (HS.new_eternal_region m0 r0 col)]
/// Stack discipline: any stack frame (and all its transitively
/// extending regions) that is pushed, modified and popped can be
/// removed from a modifies clause.
/// AR: 01/29/2019: Removing the smt pattern from this lemma.
///                 Clients are no longer expected to call it explicitly;
///                 if you find yourself having to, please raise an issue.
val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
))
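(*
/// Illustrative sketch, hypothetical client code: the usual push_frame/pop_frame
/// pattern. The temporary buffer is assumed to be stack-allocated with
/// ``LowStar.Buffer.alloca`` (see the ``alloca`` description towards the end of
/// this file); the lemmas above, via their patterns, let the temporary frame
/// drop out of the final modifies clause. The preorder ``rel`` is assumed to
/// allow the update of ``b``.
let with_temp_frame (#rrel #rel:srel UInt32.t) (b:mbuffer UInt32.t rrel rel)
  :HST.Stack unit
   (requires (fun h -> live h b /\ length b > 0 /\
                       (forall (s1 s2:Seq.seq UInt32.t). rel s1 s2)))
   (ensures (fun h0 _ h1 -> modifies (loc_buffer b) h0 h1))
  = HST.push_frame ();
    let tmp = LowStar.Buffer.alloca 0ul 8ul in
    upd b 0ul (index tmp 0ul);
    HST.pop_frame ()
*)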
/// Compatibility lemmas to rescue modifies clauses specified in the
/// standard F* HyperStack library.
val modifies_loc_regions_intro
(rs: Set.set HS.rid)
(h1 h2: HS.mem)
: Lemma
(requires (HS.modifies rs h1 h2))
(ensures (modifies (loc_regions true rs) h1 h2))
val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2))
/// Modifies clauses for allocating a reference: nothing is
/// modified. (In particular, a modifies clause does not track
/// memory locations that are created.)
val modifies_ralloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(i: HS.rid)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel)
(h' : HS.mem)
: Lemma
(requires (HST.ralloc_post i init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.ralloc_post i init h x h')]
val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')]
/// Modifies clause for freeing a reference: the address is modified.
val modifies_free
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel { HS.is_mm r } )
(m: HS.mem { m `HS.contains` r } )
: Lemma
(modifies (loc_freed_mreference r) m (HS.free r m))
[SMTPat (HS.free r m)]
/// Another compatibility lemma
val modifies_none_modifies
(h1 h2: HS.mem)
: Lemma
(requires (HST.modifies_none h1 h2))
(ensures (modifies loc_none h1 h2))
[SMTPat (HST.modifies_none h1 h2)]
/// Compatibility with HS.upd
val modifies_upd
(#t: Type) (#pre: Preorder.preorder t)
(r: HS.mreference t pre)
(v: t)
(h: HS.mem)
: Lemma
(requires (HS.contains h r))
(ensures (modifies (loc_mreference r) h (HS.upd h r v)))
[SMTPat (HS.upd h r v)]
/// Introduction lemma for modifying loc_buffer_from_to
val modifies_loc_buffer_from_to_intro
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
/// A memory ``h`` does not contain address ``a`` in region ``r``, denoted
/// ``does_not_contain_addr h (r, a)``, only if either region ``r`` is
/// not live, or address ``a`` is unused in region ``r``.
(* BEGIN TODO: move to FStar.Monotonic.HyperStack *)
val does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: GTot Type0
val not_live_region_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (~ (HS.live_region h (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val unused_in_does_not_contain_addr
(h: HS.mem)
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r)))
val addr_unused_in_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val free_does_not_contain_addr
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
HS.is_mm r /\
m `HS.contains` r /\
fst x == HS.frameOf r /\
snd x == HS.as_addr r
))
(ensures (
HS.free r m `does_not_contain_addr` x
))
[SMTPat (HS.free r m `does_not_contain_addr` x)]
val does_not_contain_addr_elim
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
m `does_not_contain_addr` x /\
HS.frameOf r == fst x /\
HS.as_addr r == snd x
))
(ensures (~ (m `HS.contains` r)))
(** END TODO *)
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_live_addresses
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_addresses false r a) l) h h' /\
(forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x))
))
(ensures (modifies l h h'))
(* Generic way to ensure that a buffer just allocated is disjoint from
   any other object, regardless of how the latter's liveness is defined. *)
val loc_not_unused_in (h: HS.mem) : GTot loc
val loc_unused_in (h: HS.mem) : GTot loc
(* Shortcut notations with more handy names *)
let loc_in (l: loc) (h: HS.mem) =
loc_not_unused_in h `loc_includes` l
let loc_not_in (l: loc) (h: HS.mem) =
loc_unused_in h `loc_includes` l
val loc_regions_unused_in (h: HS.mem) (rs: Set.set HS.rid) : Lemma
(requires (forall r . Set.mem r rs ==> (~ (HS.live_region h r))))
(ensures (loc_unused_in h `loc_includes` loc_regions false rs))
val loc_unused_in_not_unused_in_disjoint (h: HS.mem) : Lemma
(loc_disjoint (loc_unused_in h) (loc_not_unused_in h))
val not_live_region_loc_not_unused_in_disjoint
(h0: HS.mem)
(r: HS.rid)
: Lemma
(requires (~ (HS.live_region h0 r)))
(ensures (loc_disjoint (loc_region_only false r) (loc_not_unused_in h0)))
let fresh_frame_loc_not_unused_in_disjoint
(h0 h1: HS.mem)
: Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)]
= not_live_region_loc_not_unused_in_disjoint h0 (HS.get_tip h1)
val live_loc_not_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b))
(ensures (loc_not_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (live h b)]
val unused_in_loc_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (unused_in b h))
(ensures (loc_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (unused_in b h)]
val modifies_address_liveness_insensitive_unused_in
(h h' : HS.mem)
: Lemma
(requires (modifies (address_liveness_insensitive_locs) h h'))
(ensures (loc_not_unused_in h' `loc_includes` loc_not_unused_in h /\ loc_unused_in h `loc_includes` loc_unused_in h'))
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_not_unused_in
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (modifies (loc_union (loc_unused_in h) l) h h'))
(ensures (modifies l h h'))
val mreference_live_loc_not_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (h `HS.contains` r))
(ensures (loc_not_unused_in h `loc_includes` loc_freed_mreference r /\ loc_not_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.contains h r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_freed_mreference r)];
]]
val mreference_unused_in_loc_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (loc_unused_in h `loc_includes` loc_freed_mreference r /\ loc_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.unused_in r h)];
[SMTPat (loc_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_unused_in h `loc_includes` loc_freed_mreference r)];
]]
let unused_in_not_unused_in_disjoint_2
(l1 l2 l1' l2': loc)
(h: HS.mem)
: Lemma
(requires (loc_unused_in h `loc_includes` l1 /\ loc_not_unused_in h `loc_includes` l2 /\ l1 `loc_includes` l1' /\ l2 `loc_includes` l2' ))
(ensures (loc_disjoint l1' l2' ))
[SMTPat (loc_disjoint l1' l2'); SMTPat (loc_unused_in h `loc_includes` l1); SMTPat (loc_not_unused_in h `loc_includes` l2)]
= loc_includes_trans (loc_unused_in h) l1 l1' ;
loc_includes_trans (loc_not_unused_in h) l2 l2' ;
loc_unused_in_not_unused_in_disjoint h ;
loc_disjoint_includes (loc_unused_in h) (loc_not_unused_in h) l1' l2'
val modifies_loc_unused_in
(l: loc)
(h1 h2: HS.mem)
(l' : loc)
: Lemma
(requires (
modifies l h1 h2 /\
address_liveness_insensitive_locs `loc_includes` l /\
loc_unused_in h2 `loc_includes` l'
))
(ensures (loc_unused_in h1 `loc_includes` l'))
[SMTPatOr [
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h2 `loc_includes` l')];
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h1 `loc_includes` l')];
]]
/// Shorthand: freshness
let fresh_loc (l: loc) (h h' : HS.mem) : GTot Type0 =
loc_unused_in h `loc_includes` l /\
loc_not_unused_in h' `loc_includes` l
let ralloc_post_fresh_loc (#a:Type) (#rel:Preorder.preorder a) (i: HS.rid) (init:a) (m0: HS.mem)
(x: HST.mreference a rel{HST.is_eternal_region (HS.frameOf x)}) (m1: HS.mem) : Lemma
(requires (HST.ralloc_post i init m0 x m1))
(ensures (fresh_loc (loc_freed_mreference x) m0 m1))
[SMTPat (HST.ralloc_post i init m0 x m1)]
= ()
//AR: this is needed for liveness across fresh_frame
val fresh_frame_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (modifies loc_none h0 h1))
[SMTPat (HS.fresh_frame h0 h1)]
val popped_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.popped h0 h1))
(ensures (modifies (loc_region_only false (HS.get_tip h0)) h0 h1))
[SMTPat (HS.popped h0 h1)]
val modifies_remove_new_locs (l_fresh l_aux l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (fresh_loc l_fresh h1 h2 /\
modifies l_aux h1 h2 /\
l_goal `loc_includes` l_aux /\
modifies (loc_union l_fresh l_goal) h2 h3))
(ensures (modifies l_goal h1 h3))
[SMTPat (fresh_loc l_fresh h1 h2);
SMTPat (modifies l_aux h1 h2);
SMTPat (modifies l_goal h1 h3)]
(*
 * AR: this lemma frames the modifies clause across a fresh frame.
 * One way to do it would have been to reuse the lemma modifies_remove_new_locs,
 * treating the fresh frame as another new location.
 * However, the way the library is set up, a loc_region in any form cannot be
 * considered a fresh loc, so we provide a special lemma for fresh_frame.
 *)
let modifies_remove_fresh_frame (h1 h2 h3:HS.mem) (l:loc)
: Lemma (requires (HS.fresh_frame h1 h2 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h2)) l) h2 h3))
(ensures (modifies l h1 h3))
[SMTPat (modifies l h1 h3); SMTPat (HS.fresh_frame h1 h2)]
= loc_regions_unused_in h1 (HS.mod_set (Set.singleton (HS.get_tip h2)));
modifies_only_not_unused_in l h1 h3
/// Legacy shorthands for disjointness and inclusion of buffers
///
let disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_disjoint (loc_buffer b1) (loc_buffer b2)
let includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_includes (loc_buffer b1) (loc_buffer b2) /\
(g_is_null b1 <==> g_is_null b2)
val disjoint_neq (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (disjoint b1 b2 /\ U32.v (len b1) > 0))
(ensures (~(b1 === b2)))
val empty_disjoint (#t1 #t2: Type) (#rrel1 #rel1: srel t1) (#rrel2 #rel2: srel t2) (b1: mbuffer t1 rrel1 rel1) (b2: mbuffer t2 rrel2 rel2) : Lemma
(requires (length b1 == 0))
(ensures (disjoint b1 b2))
(*
/// The liveness of a sub-buffer follows from the liveness
/// of its enclosing buffer.
val includes_live (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (live h larger ==> live h smaller))
[SMTPatOr [
[SMTPat (includes larger smaller); SMTPat (live h larger)];
[SMTPat (includes larger smaller); SMTPat (live h smaller)];
]]
*)
val includes_frameOf_as_addr (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (g_is_null larger == g_is_null smaller /\ frameOf larger == frameOf smaller /\ as_addr larger == as_addr smaller))
[SMTPat (larger `includes` smaller)]
///
/// Useful shorthands for pointers, or maybe-null pointers
inline_for_extraction
type mpointer (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{length b == 1}
inline_for_extraction
type mpointer_or_null (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{if g_is_null b then True else length b == 1}
unfold
let deref (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (x:mpointer a rrel rel) =
get h x 0
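(*
/// Illustrative sketch, hypothetical client code: reading through a pointer,
/// with ``deref`` used only in the specification; ``index`` is the stateful
/// read introduced further below.
let read_ptr (#a:Type0) (#rrel #rel:srel a) (p:mpointer a rrel rel)
  :HST.Stack a (requires (fun h -> live h p))
               (ensures (fun h v h' -> h == h' /\ v == deref h p))
  = index p 0ul
*)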
/// Two pointers having different contents are disjoint. This is valid
/// only for pointers, i.e. buffers of size 1.
val pointer_distinct_sel_disjoint
(#a:Type0) (#rrel1 #rrel2 #rel1 #rel2:srel a)
(b1:mpointer a rrel1 rel1)
(b2:mpointer a rrel2 rel2)
(h:HS.mem)
:Lemma (requires (live h b1 /\ live h b2 /\ get h b1 0 =!= get h b2 0))
(ensures (disjoint b1 b2))
/// The following stateful operations on buffers do not change the
/// memory, but, as required by the C standard, they all require the
/// buffer in question to be live.
/// The nullity test ``is_null b``; KaRaMeL compiles it to C as ``b == NULL``.
val is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack bool (requires (fun h -> live h b))
(ensures (fun h y h' -> h == h' /\ y == g_is_null b))
/// ``sub b i len`` constructs the sub-buffer of ``b`` starting from
/// offset ``i`` with length ``len``. KaRaMeL extracts this operation as
/// ``b + i`` (or, equivalently, ``&b[i]``.)
val msub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t) (len:Ghost.erased U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i + U32.v (Ghost.reveal len) <= length b /\ compatible_sub b i (Ghost.reveal len) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (Ghost.reveal len)))
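(*
/// Illustrative sketch, hypothetical client code: carving the middle out of a
/// buffer. With the trivial preorder (``LowStar.Buffer.trivial_preorder``),
/// ``compatible_sub`` holds for any slice, so the sub-buffer can keep the same
/// preorder as its parent.
let middle (b:LowStar.Buffer.buffer UInt32.t)
  :HST.Stack (LowStar.Buffer.buffer UInt32.t)
   (requires (fun h -> live h b /\ length b >= 4))
   (ensures (fun h b' h' -> h == h' /\
                            b' == mgsub (LowStar.Buffer.trivial_preorder UInt32.t) b 1ul 2ul))
  = msub (LowStar.Buffer.trivial_preorder UInt32.t) b 1ul (Ghost.hide 2ul)
*)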
/// ``offset b i`` constructs the tail of the buffer ``b`` starting from
/// offset ``i``, i.e. the sub-buffer of ``b`` starting from offset ``i``
/// with length ``U32.sub (len b) i``. KaRaMeL compiles it as ``b + i`` or
/// ``&b[i]``.
///
/// This stateful operation cannot be derived from ``sub``, because the
/// length cannot be computed outside of proofs.
val moffset (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i <= length b /\ compatible_sub b i (U32.sub (len b) i) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (U32.sub (len b) i)))
// goffset
/// ``index b i`` reads the value of ``b`` at offset ``i`` from memory and
/// returns it. KaRaMeL compiles it as ``b[i]``.
val index (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (i:U32.t)
:HST.Stack a (requires (fun h -> live h b /\ U32.v i < length b))
(ensures (fun h y h' -> h == h' /\ y == Seq.index (as_seq h b) (U32.v i)))
/// The following stateful operations on buffers modify the memory,
/// and, as usual, require the liveness of the buffer.
/// ``g_upd_seq b s h`` updates the entire buffer ``b``'s contents in
/// heap ``h`` to correspond to the sequence ``s``
val g_upd_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (s:Seq.lseq a (length b))
(h:HS.mem{live h b})
:GTot HS.mem
val lemma_g_upd_with_same_seq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b)) (ensures (g_upd_seq b (as_seq h b) h == h))
/// A lemma specifying `g_upd_seq` in terms of its effect on the
/// buffer's underlying sequence
val g_upd_seq_as_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(s:Seq.lseq a (length b))
(h:HS.mem{live h b})
: Lemma (let h' = g_upd_seq b s h in
(Seq.length s > 0 ==> not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
HST.equal_domains h h' /\
as_seq h' b == s)
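(*
/// Illustrative sketch: a direct consequence of the lemma above, at the spec
/// level; writing a whole sequence into a buffer and reading it back yields
/// that same sequence.
let g_upd_seq_roundtrip (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel) (s:Seq.lseq a (length b)) (h:HS.mem{live h b})
  :Lemma (as_seq (g_upd_seq b s h) b == s)
  = g_upd_seq_as_seq b s h
*)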
/// ``g_upd b i v h`` updates the buffer ``b`` in heap ``h`` at location
/// ``i``, writing ``v`` there. This is the spec analog of the stateful
/// update ``upd`` below.
let g_upd (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: GTot HS.mem
= g_upd_seq b (Seq.upd (as_seq h b) i v) h
val g_upd_modifies_strong (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: Lemma (modifies (loc_buffer_from_to b (U32.uint_to_t i) (U32.uint_to_t (i + 1))) h (g_upd b i v h))
/// ``upd b i v`` writes ``v`` to the memory, at offset ``i`` of
/// buffer ``b``. KaRaMeL compiles it as ``b[i] = v``.
val upd'
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
:HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> h' == g_upd b (U32.v i) v h))
inline_for_extraction
let upd
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
: HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> (not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
as_seq h' b == Seq.upd (as_seq h b) (U32.v i) v))
= let h = HST.get () in
upd' b i v;
g_upd_seq_as_seq b (Seq.upd (as_seq h b) (U32.v i) v) h
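(*
/// Illustrative sketch, hypothetical client code: a read-modify-write on the
/// first cell, combining ``index`` and ``upd``. The requirement that the
/// buffer's preorder accepts the new contents is stated as an explicit
/// precondition.
let incr_first (#rrel #rel:srel UInt32.t) (b:mbuffer UInt32.t rrel rel)
  :HST.Stack unit
   (requires (fun h -> live h b /\ length b > 0 /\ UInt32.v (get h b 0) < 100 /\
                       rel (as_seq h b)
                           (Seq.upd (as_seq h b) 0 (UInt32.add (get h b 0) 1ul))))
   (ensures (fun h0 _ h1 -> modifies (loc_buffer b) h0 h1 /\
                            as_seq h1 b ==
                            Seq.upd (as_seq h0 b) 0 (UInt32.add (get h0 b 0) 1ul)))
  = let v = index b 0ul in
    upd b 0ul (UInt32.add v 1ul)
*)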
(* FIXME: Comment on `recall` *)
val recallable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val region_lifetime_buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Type0
(*
* A functoriality lemma
*)
unfold
let rrel_rel_always_compatible (#a:Type0) (rrel rel:srel a) =
forall (len:nat) (i:nat) (j:nat{i <= j /\ j <= len}). compatible_subseq_preorder len rrel i j rel
val region_lifetime_sub (#a:Type0) (#rrel #rel #subrel:srel a)
(b0:mbuffer a rrel rel)
(b1:mbuffer a rrel subrel)
: Lemma
(requires rrel_rel_always_compatible rrel subrel)
(ensures
(region_lifetime_buf b0 /\
(exists i len. U32.v i + U32.v len <= length b0 /\ b1 == mgsub subrel b0 i len)) ==> region_lifetime_buf b1)
val recallable_null (#a:Type0) (#rrel #rel:srel a)
:Lemma (recallable (mnull #a #rrel #rel)) [SMTPat (recallable (mnull #a #rrel #rel))]
(*
val recallable_includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (recallable larger <==> recallable smaller))
[SMTPatOr [
[SMTPat (recallable larger); SMTPat (recallable smaller);];
[SMTPat (larger `includes` smaller)];
]]
*)
val recallable_mgsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel /\ recallable b))
(ensures (recallable (mgsub sub_rel b i len)))
[SMTPatOr [
[SMTPat (recallable (mgsub sub_rel b i len))];
[SMTPat (recallable b); SMTPat (mgsub sub_rel b i len);]
]]
val recall (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack unit (requires (fun m -> recallable b \/ (region_lifetime_buf b /\ HS.live_region m (frameOf b))))
(ensures (fun m0 _ m1 -> m0 == m1 /\ live m1 b))
(*
* Begin: API for general witness and recall
* Clients can witness predicates on the contents of the buffer, and later recall them
* Provided the predicates are stable w.r.t. the buffer preorder
*)
(* Shorthand for predicates of Seq.seq a *)
unfold let spred (a:Type0) = Seq.seq a -> Type0
(*
 * Note the tight patterns on the quantifier; you may need to write additional
 * triggers if you are working with them directly
*)
unfold let stable_on (#a:Type0) (p:spred a) (rel:srel a) =
forall (s1 s2:Seq.seq a).{:pattern (p s1); (rel s1 s2); (p s2)} (p s1 /\ rel s1 s2) ==> p s2
(* Clients get this pure token when they witness a predicate *)
val witnessed (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a) :Type0
(*
* We can only support witness and recall for gc-malloced buffers (i.e. recallable ones)
* This is not a fundamental limitation, but needs some tweaks to the underlying state model
*)
val witness_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.ST unit (requires (fun h0 -> p (as_seq h0 b) /\ p `stable_on` rel))
(ensures (fun h0 _ h1 -> h0 == h1 /\ b `witnessed` p))
val recall_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.ST unit (requires (fun h0 -> (recallable b \/ live h0 b) /\ b `witnessed` p))
(ensures (fun h0 _ h1 -> h0 == h1 /\ live h0 b /\ p (as_seq h0 b)))
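(*
/// Illustrative sketch, hypothetical client code: witnessing a predicate on a
/// buffer whose preorder (assumed here) only relates equal sequences, so that
/// every predicate is trivially stable, and getting back a ``witnessed`` token
/// that can later be turned into a fact about the state with ``recall_p``.
let constant_rel (a:Type0) : srel a = fun s1 s2 -> s1 == s2
let remember_first_cell (b:mbuffer UInt32.t (constant_rel UInt32.t) (constant_rel UInt32.t))
  :HST.ST unit
   (requires (fun h -> live h b /\ length b > 0 /\ get h b 0 == 42ul))
   (ensures (fun h0 _ h1 -> h0 == h1 /\
                            b `witnessed` (fun s -> Seq.length s > 0 /\ Seq.index s 0 == 42ul)))
  = witness_p b (fun s -> Seq.length s > 0 /\ Seq.index s 0 == 42ul)
*)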
val witnessed_functorial (#a:Type0)
(#rrel #rel1 #rel2:srel a)
(b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t)
(s1 s2:spred a)
: Lemma
(requires
rrel_rel_always_compatible rrel rel1 /\ //e.g. trivial_preorder, immutable preorder etc.
U32.v i + U32.v len <= length b1 /\
b2 == mgsub rel2 b1 i len /\ //the underlying allocation unit for b1 and b2 must be the same
witnessed b1 s1 /\
(forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2)))
(ensures witnessed b2 s2)
(*
* A stateful version that relaxes the rrel and rel compatibility
* but requires liveness of b1
*)
val witnessed_functorial_st (#a:Type0)
(#rrel #rel1 #rel2:srel a)
(b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t)
(s1 s2:spred a)
: HST.Stack unit
(requires fun h ->
live h b1 /\
U32.v i + U32.v len <= length b1 /\
b2 == mgsub rel2 b1 i len /\
witnessed b1 s1 /\
(forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2)))
(ensures fun h0 _ h1 -> h0 == h1 /\ witnessed b2 s2)
(* End: API for general witness and recall *)
/// Deallocation. A buffer that was allocated by ``malloc`` (see below)
/// can be ``free``d.
val freeable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val free (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.ST unit (requires (fun h0 -> live h0 b /\ freeable b))
(ensures (fun h0 _ h1 -> (not (g_is_null b)) /\
Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\
(HS.get_tip h1) == (HS.get_tip h0) /\
modifies (loc_addr_of_buffer b) h0 h1 /\
HS.live_region h1 (frameOf b)))
val freeable_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (freeable b)) (ensures (length b > 0))
[SMTPat (freeable b)]
val freeable_disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (frameOf b1 <> frameOf b2 \/ as_addr b1 <> as_addr b2))
let freeable_disjoint' (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (loc_disjoint (loc_addr_of_buffer b1) (loc_addr_of_buffer b2)))
[SMTPat (freeable b1); SMTPat (disjoint b1 b2)]
= freeable_disjoint b1 b2
(***** Begin allocation functions *****)
/// Allocation. This is the common postcondition of all allocation
/// operators, which tells that the resulting buffer is fresh, and
/// specifies its initial contents.
(*
* Allocation functions:
* In the return type, we try to give heap-independent postconditions (such as length)
* in the refinement of the buffer type (for the usage pattern of top-level buffers)
* while heap dependent postconditions are provided in the ensures clause
*
* One unsatisfying aspect is that these functions are duplicated in the wrappers that we write
* (e.g. Buffer, ImmutableBuffer, etc.)
* If we don't duplicate, then the clients may face type inference issues (for preorders)
*
 * So, if you change any of the pre- or postconditions, you should change the pre- and post-spec functions
* (such as alloc_post_mem_common etc.), rather than the specs directly
* Perhaps we can rely on F* type inference and not write specs explicitly in those wrappers?
* Will try that
*
* For memory dependent post, alloc_post_mem_common is the one used by everyone
*
* For heap allocations, the library also provides partial functions that could return null
* Clients need to explicitly check for non-null values when using these functions
* Partial function specs use alloc_partial_post_mem_common
*
* NOTE: a useful test for the implementation of partial functions is that
* their spec should be valid even when their implementation just returns null
*)
unfold let lmbuffer (a:Type0) (rrel rel:srel a) (len:nat)
= b:mbuffer a rrel rel{length b == len /\ not (g_is_null b)}
unfold
let alloc_post_mem_common (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h0 h1:HS.mem) (s:Seq.seq a)
= live h1 b /\
unused_in b h0 /\
Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\
(HS.get_tip h1) == (HS.get_tip h0) /\
modifies loc_none h0 h1 /\
as_seq h1 b == s
(* Return type and post for partial allocation functions *)
unfold let lmbuffer_or_null (a:Type0) (rrel rel:srel a) (len:nat) (r:HS.rid)
= b:mbuffer a rrel rel{(not (g_is_null b)) ==> (length b == len /\ frameOf b == r)}
unfold let alloc_partial_post_mem_common (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h0 h1:HS.mem) (s:Seq.seq a)
= (g_is_null b /\ h0 == h1) \/
((not (g_is_null b)) /\ alloc_post_mem_common b h0 h1 s)
unfold let malloc_pre (r:HS.rid) (len:U32.t) = HST.is_eternal_region r /\ U32.v len > 0
/// ``gcmalloc r init len`` allocates a memory-managed buffer of some
/// positive length ``len`` in an eternal region ``r``. Every cell of this
/// buffer will have initial contents ``init``. Such a buffer cannot be
/// freed. In fact, it is eternal: it cannot be deallocated at all.
(*
* See the Allocation comment above when changing the spec
*)
val mgcmalloc (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ recallable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
(*
* Allocate a memory-managed buffer initialized with contents from src
*
* This allocates and initializes the buffer atomically (from the perspective of the Low* clients)
*)
val mgcmalloc_and_blit (#a:Type0) (#rrel:srel a) (r:HS.rid)
(#rrel1 #rel1:srel a) (src:mbuffer a rrel1 rel1) (id_src:U32.t) (len:U32.t)
: HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ recallable b})
(requires fun h0 ->
malloc_pre r len /\
live h0 src /\ U32.v id_src + U32.v len <= length src)
(ensures fun h0 b h1 ->
alloc_post_mem_common b h0 h1
(Seq.slice (as_seq h0 src) (U32.v id_src) (U32.v id_src + U32.v len)))
(*
* See the Allocation comment above when changing the spec
*)
inline_for_extraction
let mgcmalloc_partial (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer_or_null a rrel rrel (U32.v len) r{recallable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
= mgcmalloc r init len
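(*
/// Illustrative sketch, hypothetical client code: the result of a partial
/// allocation must be tested for NULL before it is used. The preorder is
/// assumed trivial (``LowStar.Buffer.trivial_preorder``).
let alloc_and_init (r:HS.rid)
  :HST.ST unit (requires (fun _ -> HST.is_eternal_region r))
               (ensures (fun _ _ _ -> True))
  = let b = mgcmalloc_partial #UInt32.t #(LowStar.Buffer.trivial_preorder UInt32.t) r 0ul 8ul in
    if is_null b then ()
    else upd b 0ul 1ul
*)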
/// ``malloc r init len`` allocates a hand-managed buffer of some
/// positive length ``len`` in an eternal region ``r``. Every cell of this
/// buffer will have initial contents ``init``. Such a buffer can be
/// freed using ``free`` above. Note that the ``freeable`` permission is
/// only on the whole buffer ``b``, and is not inherited by any of its
/// strict sub-buffers.
(*
* See the Allocation comment above when changing the spec
*)
val mmalloc (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
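(*
/// Illustrative sketch, hypothetical client code: a hand-managed buffer is
/// allocated with ``mmalloc``, written once, and then released with ``free``.
/// The preorder is assumed trivial (``LowStar.Buffer.trivial_preorder``).
let alloc_use_free (r:HS.rid)
  :HST.ST unit (requires (fun _ -> HST.is_eternal_region r))
               (ensures (fun _ _ _ -> True))
  = let b = mmalloc #UInt32.t #(LowStar.Buffer.trivial_preorder UInt32.t) r 0ul 8ul in
    upd b 0ul 1ul;
    free b
*)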
(*
* Allocate a hand-managed buffer initialized with contents from src
*
* This allocates and initializes the buffer atomically (from the perspective of the Low* clients)
*)
val mmalloc_and_blit (#a:Type0) (#rrel:srel a) (r:HS.rid)
(#rrel1 #rel1:srel a) (src:mbuffer a rrel1 rel1) (id_src:U32.t) (len:U32.t)
: HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ freeable b})
(requires fun h0 ->
malloc_pre r len /\
live h0 src /\ U32.v id_src + U32.v len <= length src)
(ensures fun h0 b h1 ->
alloc_post_mem_common b h0 h1
(Seq.slice (as_seq h0 src) (U32.v id_src) (U32.v id_src + U32.v len)))
(*
* See the Allocation comment above when changing the spec
*)
inline_for_extraction
let mmalloc_partial (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer_or_null a rrel rrel (U32.v len) r{(not (g_is_null b)) ==> freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
= mmalloc r init len
/// ``alloca init len`` allocates a buffer of some positive length ``len``
/// in the current stack frame. Every cell of this buffer will have
/// initial contents ``init``. Such a buffer cannot be freed
/// individually, but is automatically freed as soon as its stack
/// frame is deallocated by ``HST.pop_frame``.
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` return the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further the clients can attach a preorder with the subbuffer (sub_rel),
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers having the same region and address have
/// their elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
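(*
 * Illustration (a sketch, not part of the checked interface): these constructors
 * are typically combined with ``loc_union`` to describe a footprint. A hypothetical
 * signature (implicit binders omitted) for code that writes to a buffer ``b`` and a
 * reference ``r``, and nothing else, would use the ``modifies`` clause introduced
 * further below:
 *
 *   val copy_and_count (b:mbuffer a rrel rel) (r:HS.mreference nat p)
 *     : HST.Stack unit
 *       (requires (fun h -> live h b /\ h `HS.contains` r))
 *       (ensures  (fun h0 _ h1 ->
 *         modifies (loc_union (loc_buffer b) (loc_mreference r)) h0 h1))
 *)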
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
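(*
 * Illustration (a sketch, not part of the checked interface): inclusion is what
 * lets a caller weaken a footprint. For arbitrary locations ``l1`` and ``l2``
 * (hypothetical names), the union and reflexivity lemmas above, most of them
 * available through SMT patterns, give for instance:
 *
 *   assert (loc_includes (loc_union l1 l2) l2)
 *)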
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then the set of memory
/// locations of ``b1`` includes that of ``b2``.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then so do their corresponding sets of memory
/// locations.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then so do their corresponding sets of memory locations.
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
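(*
 * Illustration (a sketch, not part of the checked interface): disjointness of two
 * non-overlapping ghost sub-buffers of the same buffer follows from
 * ``loc_disjoint_gsub_buffer``. For a hypothetical buffer ``b`` with ``length b >= 16``:
 *
 *   let b_lo = mgsub rel b 0ul 8ul in
 *   let b_hi = mgsub rel b 8ul 8ul in
 *   assert (loc_disjoint (loc_buffer b_lo) (loc_buffer b_hi))
 *)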
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
* Same as all_disjoint, retaining for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
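(*
 * Illustration (a sketch, not part of the checked interface): these list-based
 * shorthands are convenient in preconditions. A hypothetical three-buffer
 * operation could require
 *
 *   (fun h -> all_live h [buf b1; buf b2; buf b3] /\
 *             all_disjoint [loc_buffer b1; loc_buffer b2; loc_buffer b3])
 *
 * which unfolds at typechecking time to the expected conjunction of ``live``
 * and ``loc_disjoint`` facts.
 *)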
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
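(*
 * Illustration (a sketch, not part of the checked interface): the elimination
 * lemmas above give the usual framing principle. For hypothetical buffers ``b1``
 * and ``b2``, both live in ``h``, with ``loc_disjoint (loc_buffer b1) (loc_buffer b2)``:
 *
 *   modifies (loc_buffer b1) h h'  ==>  live h' b2 /\ as_seq h' b2 == as_seq h b2
 *
 * and the SMT patterns on ``modifies_buffer_elim`` usually discharge this
 * automatically.
 *)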
/// If the memory state does not change, then the ``modifies`` clause holds
/// for any set of memory locations (and, in particular, for the empty set
/// ``loc_none``).
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x
/// Modifies clauses are transitive. This lemma is the most general
/// one.
val modifies_trans
(s12: loc)
(h1 h2: HS.mem)
(s23: loc)
(h3: HS.mem)
: Lemma
(requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
(ensures (modifies (loc_union s12 s23) h1 h3))
let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
= modifies_trans l h1 h2 l_goal h3
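(*
 * Illustration (a sketch, not part of the checked interface): transitivity is what
 * lets a function performing two successive writes advertise a single footprint.
 * For hypothetical heaps ``h0``, ``h1``, ``h2`` and buffers ``b1``, ``b2``:
 *
 *   modifies (loc_buffer b1) h0 h1 /\ modifies (loc_buffer b2) h1 h2
 *     ==> modifies (loc_union (loc_buffer b1) (loc_buffer b2)) h0 h2
 *)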
/// Regions that are not live can be removed from sets of memory
/// locations that are modified.
val modifies_only_live_regions
(rs: Set.set HS.rid)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_regions false rs) l) h h' /\
(forall r . Set.mem r rs ==> (~ (HS.live_region h r)))
))
(ensures (modifies l h h'))
/// As a consequence, fresh regions can be removed from modifies
/// clauses.
val no_upd_fresh_region: r:HS.rid -> l:loc -> h0:HS.mem -> h1:HS.mem -> Lemma
(requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1))
(ensures (modifies l h0 h1))
[SMTPat (HS.fresh_region r h0 h1); SMTPat (modifies l h0 h1)]
val new_region_modifies (m0: HS.mem) (r0: HS.rid) (col: option int) : Lemma
(requires (HST.is_eternal_region r0 /\ HS.live_region m0 r0 /\ (None? col \/ HS.is_heap_color (Some?.v col))))
(ensures (
let (_, m1) = HS.new_eternal_region m0 r0 col in
modifies loc_none m0 m1
))
[SMTPat (HS.new_eternal_region m0 r0 col)]
/// Stack discipline: any stack frame (and all its transitively
/// extending regions) that is pushed, modified and popped can be
/// removed from a modifies clause.
/// AR: 01/29/2019: Removing the smt pattern from this lemma.
/// Clients are no longer expected to call it explicitly;
/// if you find that you have to, please raise an issue.
val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
))
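(*
 * Illustration (a sketch, not part of the checked interface): this lemma is what
 * justifies the usual stack-allocation pattern, sketched below with hypothetical
 * helpers from LowStar.Buffer and FStar.HyperStack.ST:
 *
 *   push_frame ();
 *   let tmp = alloca 0uy 16ul in   // allocated in the fresh frame
 *   ... code that modifies only tmp ...
 *   pop_frame ()
 *
 * Afterwards the caller may conclude ``modifies loc_none h0 h3`` between its own
 * initial and final heaps.
 *)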
/// Compatibility lemmas to rescue modifies clauses specified in the
/// standard F* HyperStack library.
val modifies_loc_regions_intro
(rs: Set.set HS.rid)
(h1 h2: HS.mem)
: Lemma
(requires (HS.modifies rs h1 h2))
(ensures (modifies (loc_regions true rs) h1 h2))
val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2))
/// Modifies clauses for allocating a reference: nothing is
/// modified. (In particular, a modifies clause does not track
/// memory locations that are created.)
val modifies_ralloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(i: HS.rid)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel)
(h' : HS.mem)
: Lemma
(requires (HST.ralloc_post i init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.ralloc_post i init h x h')]
val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')]
/// Modifies clause for freeing a reference: the address is modified.
val modifies_free
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel { HS.is_mm r } )
(m: HS.mem { m `HS.contains` r } )
: Lemma
(modifies (loc_freed_mreference r) m (HS.free r m))
[SMTPat (HS.free r m)]
/// Another compatibility lemma
val modifies_none_modifies
(h1 h2: HS.mem)
: Lemma
(requires (HST.modifies_none h1 h2))
(ensures (modifies loc_none h1 h2))
[SMTPat (HST.modifies_none h1 h2)]
/// Compatibility with HS.upd
val modifies_upd
(#t: Type) (#pre: Preorder.preorder t)
(r: HS.mreference t pre)
(v: t)
(h: HS.mem)
: Lemma
(requires (HS.contains h r))
(ensures (modifies (loc_mreference r) h (HS.upd h r v)))
[SMTPat (HS.upd h r v)]
/// Introduction lemma for modifying loc_buffer_from_to
val modifies_loc_buffer_from_to_intro
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
/// A memory ``h`` does not contain address ``a`` in region ``r``, denoted
/// ``does_not_contain_addr h (r, a)``, only if either region ``r`` is
/// not live, or address ``a`` is unused in region ``r``.
(* BEGIN TODO: move to FStar.Monotonic.HyperStack *)
val does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: GTot Type0
val not_live_region_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (~ (HS.live_region h (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val unused_in_does_not_contain_addr
(h: HS.mem)
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r)))
val addr_unused_in_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val free_does_not_contain_addr
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
HS.is_mm r /\
m `HS.contains` r /\
fst x == HS.frameOf r /\
snd x == HS.as_addr r
))
(ensures (
HS.free r m `does_not_contain_addr` x
))
[SMTPat (HS.free r m `does_not_contain_addr` x)]
val does_not_contain_addr_elim
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
m `does_not_contain_addr` x /\
HS.frameOf r == fst x /\
HS.as_addr r == snd x
))
(ensures (~ (m `HS.contains` r)))
(** END TODO *)
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_live_addresses
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_addresses false r a) l) h h' /\
(forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x))
))
(ensures (modifies l h h'))
(* Generic way to ensure that a buffer just allocated is disjoint from
any other object, however the latter's liveness is defined. *)
val loc_not_unused_in (h: HS.mem) : GTot loc
val loc_unused_in (h: HS.mem) : GTot loc
(* Shortcut notations with more handy names *)
let loc_in (l: loc) (h: HS.mem) =
loc_not_unused_in h `loc_includes` l
let loc_not_in (l: loc) (h: HS.mem) =
loc_unused_in h `loc_includes` l
val loc_regions_unused_in (h: HS.mem) (rs: Set.set HS.rid) : Lemma
(requires (forall r . Set.mem r rs ==> (~ (HS.live_region h r))))
(ensures (loc_unused_in h `loc_includes` loc_regions false rs))
val loc_unused_in_not_unused_in_disjoint (h: HS.mem) : Lemma
(loc_disjoint (loc_unused_in h) (loc_not_unused_in h))
val not_live_region_loc_not_unused_in_disjoint
(h0: HS.mem)
(r: HS.rid)
: Lemma
(requires (~ (HS.live_region h0 r)))
(ensures (loc_disjoint (loc_region_only false r) (loc_not_unused_in h0)))
let fresh_frame_loc_not_unused_in_disjoint
(h0 h1: HS.mem)
: Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)]
= not_live_region_loc_not_unused_in_disjoint h0 (HS.get_tip h1)
val live_loc_not_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b))
(ensures (loc_not_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (live h b)]
val unused_in_loc_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (unused_in b h))
(ensures (loc_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (unused_in b h)]
val modifies_address_liveness_insensitive_unused_in
(h h' : HS.mem)
: Lemma
(requires (modifies (address_liveness_insensitive_locs) h h'))
(ensures (loc_not_unused_in h' `loc_includes` loc_not_unused_in h /\ loc_unused_in h `loc_includes` loc_unused_in h'))
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_not_unused_in
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (modifies (loc_union (loc_unused_in h) l) h h'))
(ensures (modifies l h h'))
val mreference_live_loc_not_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (h `HS.contains` r))
(ensures (loc_not_unused_in h `loc_includes` loc_freed_mreference r /\ loc_not_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.contains h r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_freed_mreference r)];
]]
val mreference_unused_in_loc_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (loc_unused_in h `loc_includes` loc_freed_mreference r /\ loc_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.unused_in r h)];
[SMTPat (loc_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_unused_in h `loc_includes` loc_freed_mreference r)];
]]
let unused_in_not_unused_in_disjoint_2
(l1 l2 l1' l2': loc)
(h: HS.mem)
: Lemma
(requires (loc_unused_in h `loc_includes` l1 /\ loc_not_unused_in h `loc_includes` l2 /\ l1 `loc_includes` l1' /\ l2 `loc_includes` l2' ))
(ensures (loc_disjoint l1' l2' ))
[SMTPat (loc_disjoint l1' l2'); SMTPat (loc_unused_in h `loc_includes` l1); SMTPat (loc_not_unused_in h `loc_includes` l2)]
= loc_includes_trans (loc_unused_in h) l1 l1' ;
loc_includes_trans (loc_not_unused_in h) l2 l2' ;
loc_unused_in_not_unused_in_disjoint h ;
loc_disjoint_includes (loc_unused_in h) (loc_not_unused_in h) l1' l2'
val modifies_loc_unused_in
(l: loc)
(h1 h2: HS.mem)
(l' : loc)
: Lemma
(requires (
modifies l h1 h2 /\
address_liveness_insensitive_locs `loc_includes` l /\
loc_unused_in h2 `loc_includes` l'
))
(ensures (loc_unused_in h1 `loc_includes` l'))
[SMTPatOr [
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h2 `loc_includes` l')];
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h1 `loc_includes` l')];
]]
/// Shorthand: freshness
let fresh_loc (l: loc) (h h' : HS.mem) : GTot Type0 =
loc_unused_in h `loc_includes` l /\
loc_not_unused_in h' `loc_includes` l
let ralloc_post_fresh_loc (#a:Type) (#rel:Preorder.preorder a) (i: HS.rid) (init:a) (m0: HS.mem)
(x: HST.mreference a rel{HST.is_eternal_region (HS.frameOf x)}) (m1: HS.mem) : Lemma
(requires (HST.ralloc_post i init m0 x m1))
(ensures (fresh_loc (loc_freed_mreference x) m0 m1))
[SMTPat (HST.ralloc_post i init m0 x m1)]
= ()
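(*
 * Illustration (a sketch, not part of the checked interface): ``fresh_loc`` is the
 * standard way to state that a newly allocated object is disjoint from everything
 * already allocated. For a hypothetical buffer ``b`` allocated between ``h0`` and
 * ``h1`` and any location ``l`` with ``l `loc_in` h0``, the pattern on
 * ``unused_in_not_unused_in_disjoint_2`` yields
 *
 *   fresh_loc (loc_addr_of_buffer b) h0 h1 ==> loc_disjoint (loc_addr_of_buffer b) l
 *)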
//AR: this is needed for liveness across fresh_frame
val fresh_frame_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (modifies loc_none h0 h1))
[SMTPat (HS.fresh_frame h0 h1)]
val popped_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.popped h0 h1))
(ensures (modifies (loc_region_only false (HS.get_tip h0)) h0 h1))
[SMTPat (HS.popped h0 h1)]
val modifies_remove_new_locs (l_fresh l_aux l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (fresh_loc l_fresh h1 h2 /\
modifies l_aux h1 h2 /\
l_goal `loc_includes` l_aux /\
modifies (loc_union l_fresh l_goal) h2 h3))
(ensures (modifies l_goal h1 h3))
[SMTPat (fresh_loc l_fresh h1 h2);
SMTPat (modifies l_aux h1 h2);
SMTPat (modifies l_goal h1 h3)]
(*
* AR: this lemma is framing the modifies clause across a fresh frame
* one way to do it would have been to reuse the lemma modifies_remove_new_locs,
* treating the fresh frame as another new location
 * however, the way the library is set up, loc_region in any form cannot be considered
* a fresh loc
* so, we have a special lemma for fresh_frame
*)
let modifies_remove_fresh_frame (h1 h2 h3:HS.mem) (l:loc)
: Lemma (requires (HS.fresh_frame h1 h2 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h2)) l) h2 h3))
(ensures (modifies l h1 h3))
[SMTPat (modifies l h1 h3); SMTPat (HS.fresh_frame h1 h2)]
= loc_regions_unused_in h1 (HS.mod_set (Set.singleton (HS.get_tip h2)));
modifies_only_not_unused_in l h1 h3
/// Legacy shorthands for disjointness and inclusion of buffers
///
let disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_disjoint (loc_buffer b1) (loc_buffer b2)
let includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_includes (loc_buffer b1) (loc_buffer b2) /\
(g_is_null b1 <==> g_is_null b2)
val disjoint_neq (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (disjoint b1 b2 /\ U32.v (len b1) > 0))
(ensures (~(b1 === b2)))
val empty_disjoint (#t1 #t2: Type) (#rrel1 #rel1: srel t1) (#rrel2 #rel2: srel t2) (b1: mbuffer t1 rrel1 rel1) (b2: mbuffer t2 rrel2 rel2) : Lemma
(requires (length b1 == 0))
(ensures (disjoint b1 b2))
(*
/// The liveness of a sub-buffer follows from the liveness
/// of its enclosing buffer.
val includes_live (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (live h larger ==> live h smaller))
[SMTPatOr [
[SMTPat (includes larger smaller); SMTPat (live h larger)];
[SMTPat (includes larger smaller); SMTPat (live h smaller)];
]]
*)
val includes_frameOf_as_addr (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (g_is_null larger == g_is_null smaller /\ frameOf larger == frameOf smaller /\ as_addr larger == as_addr smaller))
[SMTPat (larger `includes` smaller)]
///
/// Useful shorthands for pointers, or maybe-null pointers
inline_for_extraction
type mpointer (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{length b == 1}
inline_for_extraction
type mpointer_or_null (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{if g_is_null b then True else length b == 1}
unfold
let deref (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (x:mpointer a rrel rel) =
get h x 0
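(*
 * Illustration (a sketch, not part of the checked interface): with these
 * shorthands, a hypothetical counter stored behind ``p : mpointer nat rrel rel``
 * can be specified as
 *
 *   (ensures (fun h0 _ h1 -> deref h1 p == deref h0 p + 1))
 *)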
/// Two pointers having different contents are disjoint. This is valid
/// only for pointers, i.e. buffers of size 1.
val pointer_distinct_sel_disjoint
(#a:Type0) (#rrel1 #rrel2 #rel1 #rel2:srel a)
(b1:mpointer a rrel1 rel1)
(b2:mpointer a rrel2 rel2)
(h:HS.mem)
:Lemma (requires (live h b1 /\ live h b2 /\ get h b1 0 =!= get h b2 0))
(ensures (disjoint b1 b2))
/// The following stateful operations on buffers do not change the
/// memory, but, as required by the C standard, they all require the
/// buffer in question to be live.
/// The nullity test, ``is_null b``, which KaRaMeL compiles to C as ``b == NULL``.
val is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack bool (requires (fun h -> live h b))
(ensures (fun h y h' -> h == h' /\ y == g_is_null b))
/// ``sub b i len`` constructs the sub-buffer of ``b`` starting from
/// offset ``i`` with length ``len``. KaRaMeL extracts this operation as
/// ``b + i`` (or, equivalently, ``&b[i]``.)
val msub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t) (len:Ghost.erased U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i + U32.v (Ghost.reveal len) <= length b /\ compatible_sub b i (Ghost.reveal len) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (Ghost.reveal len)))
/// ``offset b i`` constructs the tail of the buffer ``b`` starting from
/// offset ``i``, i.e. the sub-buffer of ``b`` starting from offset ``i``
/// with length ``U32.sub (len b) i``. KaRaMeL compiles it as ``b + i`` or
/// ``&b[i]``.
///
/// This stateful operation cannot be derived from ``sub``, because the
/// length cannot be computed outside of proofs.
val moffset (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i <= length b /\ compatible_sub b i (U32.sub (len b) i) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (U32.sub (len b) i)))
// goffset
/// ``index b i`` reads the value of ``b`` at offset ``i`` from memory and
/// returns it. KaRaMeL compiles it as b[i].
val index (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (i:U32.t)
:HST.Stack a (requires (fun h -> live h b /\ U32.v i < length b))
(ensures (fun h y h' -> h == h' /\ y == Seq.index (as_seq h b) (U32.v i)))
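(*
 * Illustration (a sketch, not part of the checked interface): a typical read
 * through a sub-buffer, for a hypothetical buffer ``b`` with ``length b >= 8``
 * and a compatible ``sub_rel``:
 *
 *   let tail = msub sub_rel b 4ul (Ghost.hide 4ul) in
 *   let x = index tail 0ul in   // reads the element at index 4 of b
 *)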
/// The following stateful operations on buffers modify the memory,
/// and, as usual, require the liveness of the buffer.
/// ``g_upd_seq b s h`` updates the entire buffer `b`'s contents in
/// heap `h` to correspond to the sequence `s`
val g_upd_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (s:Seq.lseq a (length b))
(h:HS.mem{live h b})
:GTot HS.mem
val lemma_g_upd_with_same_seq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b)) (ensures (g_upd_seq b (as_seq h b) h == h))
/// A lemma specifying `g_upd_seq` in terms of its effect on the
/// buffer's underlying sequence
val g_upd_seq_as_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(s:Seq.lseq a (length b))
(h:HS.mem{live h b})
: Lemma (let h' = g_upd_seq b s h in
(Seq.length s > 0 ==> not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
HST.equal_domains h h' /\
as_seq h' b == s)
/// ``g_upd b i v h`` updates the buffer `b` in heap `h` at location
/// `i` writing ``v`` there. This is the spec analog of the stateful
/// update `upd` below.
let g_upd (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: GTot HS.mem
= g_upd_seq b (Seq.upd (as_seq h b) i v) h
val g_upd_modifies_strong (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: Lemma (modifies (loc_buffer_from_to b (U32.uint_to_t i) (U32.uint_to_t (i + 1))) h (g_upd b i v h))
/// ``upd b i v`` writes ``v`` to the memory, at offset ``i`` of
/// buffer ``b``. KaRaMeL compiles it as ``b[i] = v``.
val upd'
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
:HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> h' == g_upd b (U32.v i) v h))
inline_for_extraction
let upd
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
: HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> (not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
as_seq h' b == Seq.upd (as_seq h b) (U32.v i) v))
= let h = HST.get () in
upd' b i v;
g_upd_seq_as_seq b (Seq.upd (as_seq h b) (U32.v i) v) h
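(*
 * Illustration (a sketch, not part of the checked interface): for the trivial
 * preorder the relational precondition of ``upd`` is vacuous, so a hypothetical
 * write looks like ordinary imperative code:
 *
 *   upd b 0ul v   // requires rel (as_seq h b) (Seq.upd (as_seq h b) 0 v)
 *
 * For a non-trivial ``rel``, the caller must additionally establish that the
 * updated sequence is related to the current one.
 *)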
(* FIXME: Comment on `recall` *)
val recallable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val region_lifetime_buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Type0
(*
* A functoriality lemma
*)
unfold
let rrel_rel_always_compatible (#a:Type0) (rrel rel:srel a) =
forall (len:nat) (i:nat) (j:nat{i <= j /\ j <= len}). compatible_subseq_preorder len rrel i j rel
val region_lifetime_sub (#a:Type0) (#rrel #rel #subrel:srel a)
(b0:mbuffer a rrel rel)
(b1:mbuffer a rrel subrel)
: Lemma
(requires rrel_rel_always_compatible rrel subrel)
(ensures
(region_lifetime_buf b0 /\
(exists i len. U32.v i + U32.v len <= length b0 /\ b1 == mgsub subrel b0 i len)) ==> region_lifetime_buf b1)
val recallable_null (#a:Type0) (#rrel #rel:srel a)
:Lemma (recallable (mnull #a #rrel #rel)) [SMTPat (recallable (mnull #a #rrel #rel))]
(*
val recallable_includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (recallable larger <==> recallable smaller))
[SMTPatOr [
[SMTPat (recallable larger); SMTPat (recallable smaller);];
[SMTPat (larger `includes` smaller)];
]]
*)
val recallable_mgsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel /\ recallable b))
(ensures (recallable (mgsub sub_rel b i len)))
[SMTPatOr [
[SMTPat (recallable (mgsub sub_rel b i len))];
[SMTPat (recallable b); SMTPat (mgsub sub_rel b i len);]
]]
val recall (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack unit (requires (fun m -> recallable b \/ (region_lifetime_buf b /\ HS.live_region m (frameOf b))))
(ensures (fun m0 _ m1 -> m0 == m1 /\ live m1 b))
(*
* Begin: API for general witness and recall
* Clients can witness predicates on the contents of the buffer, and later recall them
* Provided the predicates are stable w.r.t. the buffer preorder
*)
(* Shorthand for predicates of Seq.seq a *)
unfold let spred (a:Type0) = Seq.seq a -> Type0
(*
 * Note the tight patterns on the quantifier; you may need to write additional triggers
* if you are directly working with them
*)
unfold let stable_on (#a:Type0) (p:spred a) (rel:srel a) =
forall (s1 s2:Seq.seq a).{:pattern (p s1); (rel s1 s2); (p s2)} (p s1 /\ rel s1 s2) ==> p s2
(* Clients get this pure token when they witness a predicate *)
val witnessed (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a) :Type0
(*
* We can only support witness and recall for gc-malloced buffers (i.e. recallable ones)
* This is not a fundamental limitation, but needs some tweaks to the underlying state model
*)
val witness_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.ST unit (requires (fun h0 -> p (as_seq h0 b) /\ p `stable_on` rel))
(ensures (fun h0 _ h1 -> h0 == h1 /\ b `witnessed` p))
val recall_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.ST unit (requires (fun h0 -> (recallable b \/ live h0 b) /\ b `witnessed` p))
(ensures (fun h0 _ h1 -> h0 == h1 /\ live h0 b /\ p (as_seq h0 b)))
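(*
 * Illustration (a sketch, not part of the checked interface): a client can witness
 * a property of the current contents, provided it is stable w.r.t. ``rel``, and
 * recall it later. Hypothetical sketch for an immutable-style preorder whose
 * contents always remain equal to ``s``:
 *
 *   witness_p b (fun s' -> s' `Seq.equal` s);
 *   ...
 *   recall_p b (fun s' -> s' `Seq.equal` s);
 *   // here: live h b /\ (as_seq h b) `Seq.equal` s
 *)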
val witnessed_functorial (#a:Type0)
(#rrel #rel1 #rel2:srel a)
(b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t)
(s1 s2:spred a)
: Lemma
(requires
rrel_rel_always_compatible rrel rel1 /\ //e.g. trivial_preorder, immutable preorder etc.
U32.v i + U32.v len <= length b1 /\
b2 == mgsub rel2 b1 i len /\ //the underlying allocation unit for b1 and b2 must be the same
witnessed b1 s1 /\
(forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2)))
(ensures witnessed b2 s2)
(*
* A stateful version that relaxes the rrel and rel compatibility
* but requires liveness of b1
*)
val witnessed_functorial_st (#a:Type0)
(#rrel #rel1 #rel2:srel a)
(b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t)
(s1 s2:spred a)
: HST.Stack unit
(requires fun h ->
live h b1 /\
U32.v i + U32.v len <= length b1 /\
b2 == mgsub rel2 b1 i len /\
witnessed b1 s1 /\
(forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2)))
(ensures fun h0 _ h1 -> h0 == h1 /\ witnessed b2 s2)
(* End: API for general witness and recall *)
/// Deallocation. A buffer that was allocated by ``malloc`` (see below)
/// can be ``free``d.
val freeable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val free (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.ST unit (requires (fun h0 -> live h0 b /\ freeable b))
(ensures (fun h0 _ h1 -> (not (g_is_null b)) /\
Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\
(HS.get_tip h1) == (HS.get_tip h0) /\
modifies (loc_addr_of_buffer b) h0 h1 /\
HS.live_region h1 (frameOf b)))
val freeable_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (freeable b)) (ensures (length b > 0))
[SMTPat (freeable b)]
val freeable_disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (frameOf b1 <> frameOf b2 \/ as_addr b1 <> as_addr b2))
let freeable_disjoint' (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (loc_disjoint (loc_addr_of_buffer b1) (loc_addr_of_buffer b2)))
[SMTPat (freeable b1); SMTPat (disjoint b1 b2)]
= freeable_disjoint b1 b2
(***** Begin allocation functions *****)
/// Allocation. This is the common postcondition of all allocation
/// operators, which states that the resulting buffer is fresh, and
/// specifies its initial contents.
(*
* Allocation functions:
* In the return type, we try to give heap-independent postconditions (such as length)
* in the refinement of the buffer type (for the usage pattern of top-level buffers)
* while heap dependent postconditions are provided in the ensures clause
*
* One unsatisfying aspect is that these functions are duplicated in the wrappers that we write
* (e.g. Buffer, ImmutableBuffer, etc.)
* If we don't duplicate, then the clients may face type inference issues (for preorders)
*
 * So, if you change any of the pre- or postconditions, you should change the pre and post spec functions
* (such as alloc_post_mem_common etc.), rather than the specs directly
* Perhaps we can rely on F* type inference and not write specs explicitly in those wrappers?
* Will try that
*
* For memory dependent post, alloc_post_mem_common is the one used by everyone
*
* For heap allocations, the library also provides partial functions that could return null
* Clients need to explicitly check for non-null values when using these functions
* Partial function specs use alloc_partial_post_mem_common
*
* NOTE: a useful test for the implementation of partial functions is that
* their spec should be valid even when their implementation just returns null
*)
unfold let lmbuffer (a:Type0) (rrel rel:srel a) (len:nat)
= b:mbuffer a rrel rel{length b == len /\ not (g_is_null b)}
unfold
let alloc_post_mem_common (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h0 h1:HS.mem) (s:Seq.seq a)
= live h1 b /\
unused_in b h0 /\
Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\
(HS.get_tip h1) == (HS.get_tip h0) /\
modifies loc_none h0 h1 /\
as_seq h1 b == s
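(*
 * Illustration (a sketch, not part of the checked interface): a concrete allocator
 * in a wrapper library would typically be specified along these lines
 * (hypothetical sketch, implicit binders omitted):
 *
 *   val malloc (r:HS.rid) (init:a) (len:U32.t)
 *     : HST.ST (lmbuffer a rrel rel (U32.v len))
 *       (requires (fun _ -> HST.is_eternal_region r /\ U32.v len > 0))
 *       (ensures  (fun h0 b h1 ->
 *         alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
 *)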
(* Return type and post for partial allocation functions *)
unfold let lmbuffer_or_null (a:Type0) (rrel rel:srel a) (len:nat) (r:HS.rid)
= b:mbuffer a rrel rel{(not (g_is_null b)) ==> (length b == len /\ frameOf b == r)} | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
b: LowStar.Monotonic.Buffer.mbuffer a rrel rel ->
h0: FStar.Monotonic.HyperStack.mem ->
h1: FStar.Monotonic.HyperStack.mem ->
s: FStar.Seq.Base.seq a
-> Prims.logical | Prims.Tot | [
"total"
] | [] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"FStar.Monotonic.HyperStack.mem",
"FStar.Seq.Base.seq",
"Prims.l_or",
"Prims.l_and",
"Prims.b2t",
"LowStar.Monotonic.Buffer.g_is_null",
"Prims.eq2",
"Prims.op_Negation",
"LowStar.Monotonic.Buffer.alloc_post_mem_common",
"Prims.logical"
] | [] | false | false | false | false | true | let alloc_partial_post_mem_common
(#a: Type0)
(#rrel #rel: srel a)
(b: mbuffer a rrel rel)
(h0 h1: HS.mem)
(s: Seq.seq a)
=
| (g_is_null b /\ h0 == h1) \/ ((not (g_is_null b)) /\ alloc_post_mem_common b h0 h1 s) | false |
|
LowStar.Monotonic.Buffer.fsti | LowStar.Monotonic.Buffer.alloca_of_list_pre | val alloca_of_list_pre : init: Prims.list a -> Prims.logical | let alloca_of_list_pre (#a:Type0) (init:list a) =
normalize (0 < FStar.List.Tot.length init) /\
normalize (FStar.List.Tot.length init <= UInt.max_int 32) | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 59,
"end_line": 2240,
"start_col": 7,
"start_line": 2238
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
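(* For example, ``get`` and ``as_seq`` agree pointwise. The following sketch is
   illustrative only and is not part of this interface:

   let get_as_seq (#a:Type0) (#rrel #rel:srel a)
     (h:HS.mem) (b:mbuffer a rrel rel) (i:nat{i < length b})
     : Lemma (get h b i == Seq.index (as_seq h b) i)
     = ()
*)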
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder ``sub_rel`` to the sub-buffer,
/// provided it is compatible (in the sense of ``compatible_sub``).
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
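(* As an illustration (a sketch only, not part of this interface), a buffer
   with at least four cells can be carved into a prefix of length 2 that keeps
   the parent's preorder; by ``len_gsub`` its length is exactly 2.

   let first_two (#a:Type0) (#rrel #rel:srel a)
     (b:mbuffer a rrel rel { 4 <= length b })
     : GTot (mbuffer a rrel rel)
     = mgsub rel b 0ul 2ul
*)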
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers with the same region and address have
/// elements of the same type and the same initial preorder.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
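(* A small sketch (not part of this interface) of how these algebraic laws and
   their SMT patterns are meant to discharge footprint equalities; if the
   patterns do not fire, the lemmas above can be invoked explicitly.

   let loc_union_example (l1 l2:loc)
     : Lemma (loc_union (loc_union l1 loc_none) (loc_union l1 l2) == loc_union l1 l2)
     = ()
*)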
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
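(* For instance (sketch only), ``loc_buffer_from_to b 2ul 5ul`` is the footprint
   of an update that touches only cells 2, 3 and 4 of ``b``; by
   ``loc_buffer_from_to_eq`` it coincides with ``loc_buffer (mgsub rel b 2ul 3ul)``
   whenever ``5 <= length b``. *)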
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
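(* For instance (sketch only), the footprint of the current stack frame together
   with every frame nested inside it is

     loc_all_regions_from false (HS.get_tip h)

   whereas ``loc_region_only false (HS.get_tip h)`` covers the top frame alone. *)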
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then the set of memory
/// locations of ``b1`` includes that of ``b2``.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then the set of memory locations corresponding to
/// ``s1`` includes the one corresponding to ``s2``.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``, then
/// the corresponding set of memory locations for ``s1`` includes the one for ``s2``.
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
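(* Sketch (not part of this interface): with the patterns above, inclusion of a
   compound footprint in each of its components is typically automatic.

   let union_includes_component (#a:Type0) (#rrel #rel:srel a)
     (b1 b2:mbuffer a rrel rel)
     : Lemma (loc_includes (loc_union (loc_buffer b1) (loc_buffer b2)) (loc_buffer b2))
     = ()
*)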
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
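(* Sketch (not part of this interface): carving non-overlapping ranges out of
   the same buffer yields disjoint footprints, thanks to the pattern on
   ``loc_disjoint_gsub_buffer``.

   let halves_disjoint (#a:Type0) (#rrel #rel:srel a)
     (b:mbuffer a rrel rel { length b == 8 })
     : Lemma (loc_disjoint (loc_buffer (mgsub rel b 0ul 4ul))
                           (loc_buffer (mgsub rel b 4ul 4ul)))
     = ()
*)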
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
 * Same as all_disjoint, retained for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
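(* Sketch of the intended use (the function below is hypothetical and not part
   of this interface): preconditions over several buffers are usually phrased
   with these combinators, which reduce to plain conjunctions at type-checking
   time.

   val copy_spec (#a:Type0) (#rrel #rel:srel a) (dst src:mbuffer a rrel rel)
     : HST.Stack unit
       (requires (fun h ->
         all_live h [buf dst; buf src] /\
         loc_pairwise_disjoint [loc_buffer dst; loc_buffer src]))
       (ensures (fun _ _ _ -> True))
*)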
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if,
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
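(* Sketch (not part of this interface): the elimination lemmas above yield the
   usual framing principle. If only ``b1`` is modified and ``b2`` is disjoint
   from it, then ``b2`` keeps both its liveness and its contents.

   let frame_disjoint_buffer (#a:Type0) (#rrel #rel:srel a)
     (b1 b2:mbuffer a rrel rel) (h0 h1:HS.mem)
     : Lemma (requires (live h0 b2 /\
                        loc_disjoint (loc_buffer b1) (loc_buffer b2) /\
                        modifies (loc_buffer b1) h0 h1))
             (ensures  (live h1 b2 /\ as_seq h1 b2 == as_seq h0 b2))
     = ()
*)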
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
/// If the memory state does not change, then the ``modifies`` clause holds
/// for any set of memory locations (in particular, the empty set ``loc_none``).
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x
/// Modifies clauses are transitive. This lemma is the most general
/// one.
val modifies_trans
(s12: loc)
(h1 h2: HS.mem)
(s23: loc)
(h3: HS.mem)
: Lemma
(requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
(ensures (modifies (loc_union s12 s23) h1 h3))
let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
= modifies_trans l h1 h2 l_goal h3
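(* Sketch (not part of this interface): transitivity in its linear form is what
   typically discharges the overall modifies clause of a function that performs
   two successive updates within the same footprint.

   let modifies_two_steps (l:loc) (h0 h1 h2:HS.mem)
     : Lemma (requires (modifies l h0 h1 /\ modifies l h1 h2))
             (ensures  (modifies l h0 h2))
     = ()
*)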
/// Regions that are not live can be removed from sets of memory
/// locations that are modified.
val modifies_only_live_regions
(rs: Set.set HS.rid)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_regions false rs) l) h h' /\
(forall r . Set.mem r rs ==> (~ (HS.live_region h r)))
))
(ensures (modifies l h h'))
/// As a consequence, fresh regions can be removed from modifies
/// clauses.
val no_upd_fresh_region: r:HS.rid -> l:loc -> h0:HS.mem -> h1:HS.mem -> Lemma
(requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1))
(ensures (modifies l h0 h1))
[SMTPat (HS.fresh_region r h0 h1); SMTPat (modifies l h0 h1)]
val new_region_modifies (m0: HS.mem) (r0: HS.rid) (col: option int) : Lemma
(requires (HST.is_eternal_region r0 /\ HS.live_region m0 r0 /\ (None? col \/ HS.is_heap_color (Some?.v col))))
(ensures (
let (_, m1) = HS.new_eternal_region m0 r0 col in
modifies loc_none m0 m1
))
[SMTPat (HS.new_eternal_region m0 r0 col)]
/// Stack discipline: any stack frame (and all its transitively
/// extending regions) that is pushed, modified and popped can be
/// removed from a modifies clause.
/// AR: 01/29/2019: Removing the SMT pattern from this lemma.
/// Clients are no longer expected to call it explicitly;
/// if you find that you have to, please raise an issue.
val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
))
/// Compatibility lemmas to rescue modifies clauses specified in the
/// standard F* HyperStack library.
val modifies_loc_regions_intro
(rs: Set.set HS.rid)
(h1 h2: HS.mem)
: Lemma
(requires (HS.modifies rs h1 h2))
(ensures (modifies (loc_regions true rs) h1 h2))
val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2))
/// Modifies clauses for allocating a reference: nothing is
/// modified. (In particular, a modifies clause does not track
/// memory locations that are created.)
val modifies_ralloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(i: HS.rid)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel)
(h' : HS.mem)
: Lemma
(requires (HST.ralloc_post i init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.ralloc_post i init h x h')]
val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')]
/// Modifies clause for freeing a reference: the address is modified.
val modifies_free
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel { HS.is_mm r } )
(m: HS.mem { m `HS.contains` r } )
: Lemma
(modifies (loc_freed_mreference r) m (HS.free r m))
[SMTPat (HS.free r m)]
/// Another compatibility lemma
val modifies_none_modifies
(h1 h2: HS.mem)
: Lemma
(requires (HST.modifies_none h1 h2))
(ensures (modifies loc_none h1 h2))
[SMTPat (HST.modifies_none h1 h2)]
/// Compatibility with HS.upd
val modifies_upd
(#t: Type) (#pre: Preorder.preorder t)
(r: HS.mreference t pre)
(v: t)
(h: HS.mem)
: Lemma
(requires (HS.contains h r))
(ensures (modifies (loc_mreference r) h (HS.upd h r v)))
[SMTPat (HS.upd h r v)]
/// Introduction lemma for modifying loc_buffer_from_to
val modifies_loc_buffer_from_to_intro
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
/// A memory ``h`` does not contain address ``a`` in region ``r``, denoted
/// ``does_not_contain_addr h (r, a)``, only if either region ``r`` is
/// not live, or address ``a`` is unused in region ``r``.
(* BEGIN TODO: move to FStar.Monotonic.HyperStack *)
val does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: GTot Type0
val not_live_region_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (~ (HS.live_region h (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val unused_in_does_not_contain_addr
(h: HS.mem)
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r)))
val addr_unused_in_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val free_does_not_contain_addr
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
HS.is_mm r /\
m `HS.contains` r /\
fst x == HS.frameOf r /\
snd x == HS.as_addr r
))
(ensures (
HS.free r m `does_not_contain_addr` x
))
[SMTPat (HS.free r m `does_not_contain_addr` x)]
val does_not_contain_addr_elim
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
m `does_not_contain_addr` x /\
HS.frameOf r == fst x /\
HS.as_addr r == snd x
))
(ensures (~ (m `HS.contains` r)))
(** END TODO *)
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_live_addresses
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_addresses false r a) l) h h' /\
(forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x))
))
(ensures (modifies l h h'))
(* Generic way to ensure that a freshly allocated buffer is disjoint from
   any other object, regardless of how the latter's liveness is defined. *)
val loc_not_unused_in (h: HS.mem) : GTot loc
val loc_unused_in (h: HS.mem) : GTot loc
(* Shortcut notations with more handy names *)
let loc_in (l: loc) (h: HS.mem) =
loc_not_unused_in h `loc_includes` l
let loc_not_in (l: loc) (h: HS.mem) =
loc_unused_in h `loc_includes` l
val loc_regions_unused_in (h: HS.mem) (rs: Set.set HS.rid) : Lemma
(requires (forall r . Set.mem r rs ==> (~ (HS.live_region h r))))
(ensures (loc_unused_in h `loc_includes` loc_regions false rs))
val loc_unused_in_not_unused_in_disjoint (h: HS.mem) : Lemma
(loc_disjoint (loc_unused_in h) (loc_not_unused_in h))
val not_live_region_loc_not_unused_in_disjoint
(h0: HS.mem)
(r: HS.rid)
: Lemma
(requires (~ (HS.live_region h0 r)))
(ensures (loc_disjoint (loc_region_only false r) (loc_not_unused_in h0)))
let fresh_frame_loc_not_unused_in_disjoint
(h0 h1: HS.mem)
: Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)]
= not_live_region_loc_not_unused_in_disjoint h0 (HS.get_tip h1)
val live_loc_not_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b))
(ensures (loc_not_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (live h b)]
val unused_in_loc_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (unused_in b h))
(ensures (loc_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (unused_in b h)]
val modifies_address_liveness_insensitive_unused_in
(h h' : HS.mem)
: Lemma
(requires (modifies (address_liveness_insensitive_locs) h h'))
(ensures (loc_not_unused_in h' `loc_includes` loc_not_unused_in h /\ loc_unused_in h `loc_includes` loc_unused_in h'))
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_not_unused_in
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (modifies (loc_union (loc_unused_in h) l) h h'))
(ensures (modifies l h h'))
val mreference_live_loc_not_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (h `HS.contains` r))
(ensures (loc_not_unused_in h `loc_includes` loc_freed_mreference r /\ loc_not_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.contains h r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_freed_mreference r)];
]]
val mreference_unused_in_loc_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (loc_unused_in h `loc_includes` loc_freed_mreference r /\ loc_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.unused_in r h)];
[SMTPat (loc_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_unused_in h `loc_includes` loc_freed_mreference r)];
]]
let unused_in_not_unused_in_disjoint_2
(l1 l2 l1' l2': loc)
(h: HS.mem)
: Lemma
(requires (loc_unused_in h `loc_includes` l1 /\ loc_not_unused_in h `loc_includes` l2 /\ l1 `loc_includes` l1' /\ l2 `loc_includes` l2' ))
(ensures (loc_disjoint l1' l2' ))
[SMTPat (loc_disjoint l1' l2'); SMTPat (loc_unused_in h `loc_includes` l1); SMTPat (loc_not_unused_in h `loc_includes` l2)]
= loc_includes_trans (loc_unused_in h) l1 l1' ;
loc_includes_trans (loc_not_unused_in h) l2 l2' ;
loc_unused_in_not_unused_in_disjoint h ;
loc_disjoint_includes (loc_unused_in h) (loc_not_unused_in h) l1' l2'
val modifies_loc_unused_in
(l: loc)
(h1 h2: HS.mem)
(l' : loc)
: Lemma
(requires (
modifies l h1 h2 /\
address_liveness_insensitive_locs `loc_includes` l /\
loc_unused_in h2 `loc_includes` l'
))
(ensures (loc_unused_in h1 `loc_includes` l'))
[SMTPatOr [
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h2 `loc_includes` l')];
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h1 `loc_includes` l')];
]]
/// Shorthand: freshness
let fresh_loc (l: loc) (h h' : HS.mem) : GTot Type0 =
loc_unused_in h `loc_includes` l /\
loc_not_unused_in h' `loc_includes` l
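(* Sketch (hypothetical, not part of this interface): an allocation routine is
   typically specified with ``fresh_loc``, so that the returned buffer is known
   to be disjoint from every location that was already allocated.

   val alloc_like (#a:Type0) (rrel:srel a) (r:HS.rid) (init:a) (len:U32.t)
     : HST.ST (mbuffer a rrel rrel)
       (requires (fun _ -> HST.is_eternal_region r /\ U32.v len > 0))
       (ensures  (fun h0 b h1 ->
         live h1 b /\ length b == U32.v len /\
         fresh_loc (loc_buffer b) h0 h1 /\
         modifies loc_none h0 h1))
*)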
let ralloc_post_fresh_loc (#a:Type) (#rel:Preorder.preorder a) (i: HS.rid) (init:a) (m0: HS.mem)
(x: HST.mreference a rel{HST.is_eternal_region (HS.frameOf x)}) (m1: HS.mem) : Lemma
(requires (HST.ralloc_post i init m0 x m1))
(ensures (fresh_loc (loc_freed_mreference x) m0 m1))
[SMTPat (HST.ralloc_post i init m0 x m1)]
= ()
//AR: this is needed for liveness across fresh_frame
val fresh_frame_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (modifies loc_none h0 h1))
[SMTPat (HS.fresh_frame h0 h1)]
val popped_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.popped h0 h1))
(ensures (modifies (loc_region_only false (HS.get_tip h0)) h0 h1))
[SMTPat (HS.popped h0 h1)]
val modifies_remove_new_locs (l_fresh l_aux l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (fresh_loc l_fresh h1 h2 /\
modifies l_aux h1 h2 /\
l_goal `loc_includes` l_aux /\
modifies (loc_union l_fresh l_goal) h2 h3))
(ensures (modifies l_goal h1 h3))
[SMTPat (fresh_loc l_fresh h1 h2);
SMTPat (modifies l_aux h1 h2);
SMTPat (modifies l_goal h1 h3)]
(*
 * AR: this lemma frames the modifies clause across a fresh frame.
 * One way to do it would have been to reuse the lemma modifies_remove_new_locs,
 * treating the fresh frame as another new location.
 * However, the way the library is set up, loc_region in any form cannot be
 * considered a fresh loc, so we provide a special lemma for fresh_frame.
 *)
let modifies_remove_fresh_frame (h1 h2 h3:HS.mem) (l:loc)
: Lemma (requires (HS.fresh_frame h1 h2 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h2)) l) h2 h3))
(ensures (modifies l h1 h3))
[SMTPat (modifies l h1 h3); SMTPat (HS.fresh_frame h1 h2)]
= loc_regions_unused_in h1 (HS.mod_set (Set.singleton (HS.get_tip h2)));
modifies_only_not_unused_in l h1 h3
/// Legacy shorthands for disjointness and inclusion of buffers
///
let disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_disjoint (loc_buffer b1) (loc_buffer b2)
let includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_includes (loc_buffer b1) (loc_buffer b2) /\
(g_is_null b1 <==> g_is_null b2)
val disjoint_neq (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (disjoint b1 b2 /\ U32.v (len b1) > 0))
(ensures (~(b1 === b2)))
val empty_disjoint (#t1 #t2: Type) (#rrel1 #rel1: srel t1) (#rrel2 #rel2: srel t2) (b1: mbuffer t1 rrel1 rel1) (b2: mbuffer t2 rrel2 rel2) : Lemma
(requires (length b1 == 0))
(ensures (disjoint b1 b2))
(*
/// The liveness of a sub-buffer follows from the liveness
/// of its enclosing buffer.
val includes_live (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (live h larger ==> live h smaller))
[SMTPatOr [
[SMTPat (includes larger smaller); SMTPat (live h larger)];
[SMTPat (includes larger smaller); SMTPat (live h smaller)];
]]
*)
val includes_frameOf_as_addr (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (g_is_null larger == g_is_null smaller /\ frameOf larger == frameOf smaller /\ as_addr larger == as_addr smaller))
[SMTPat (larger `includes` smaller)]
///
/// Useful shorthands for pointers, or maybe-null pointers
inline_for_extraction
type mpointer (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{length b == 1}
inline_for_extraction
type mpointer_or_null (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{if g_is_null b then True else length b == 1}
unfold
let deref (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (x:mpointer a rrel rel) =
get h x 0
/// Two pointers having different contents are disjoint. This is valid
/// only for pointers, i.e. buffers of size 1.
val pointer_distinct_sel_disjoint
(#a:Type0) (#rrel1 #rrel2 #rel1 #rel2:srel a)
(b1:mpointer a rrel1 rel1)
(b2:mpointer a rrel2 rel2)
(h:HS.mem)
:Lemma (requires (live h b1 /\ live h b2 /\ get h b1 0 =!= get h b2 0))
(ensures (disjoint b1 b2))
/// The following stateful operations on buffers do not change the
/// memory, but, as required by the C standard, they all require the
/// buffer in question to be live.
/// The nullity test, ``is_null b``, is compiled by KaRaMeL to C as ``b == NULL``.
val is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack bool (requires (fun h -> live h b))
(ensures (fun h y h' -> h == h' /\ y == g_is_null b))
/// ``sub b i len`` constructs the sub-buffer of ``b`` starting from
/// offset ``i`` with length ``len``. KaRaMeL extracts this operation as
/// ``b + i`` (or, equivalently, ``&b[i]``.)
val msub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t) (len:Ghost.erased U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i + U32.v (Ghost.reveal len) <= length b /\ compatible_sub b i (Ghost.reveal len) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (Ghost.reveal len)))
/// ``offset b i`` constructs the tail of the buffer ``b`` starting from
/// offset ``i``, i.e. the sub-buffer of ``b`` starting from offset ``i``
/// with length ``U32.sub (len b) i``. KaRaMeL compiles it as ``b + i`` or
/// ``&b[i]``.
///
/// This stateful operation cannot be derived from ``sub``, because the
/// length cannot be computed outside of proofs.
val moffset (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i <= length b /\ compatible_sub b i (U32.sub (len b) i) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (U32.sub (len b) i)))
// goffset
/// ``index b i`` reads the value of ``b`` at offset ``i`` from memory and
/// returns it. KaRaMeL compiles it as b[i].
val index (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (i:U32.t)
:HST.Stack a (requires (fun h -> live h b /\ U32.v i < length b))
(ensures (fun h y h' -> h == h' /\ y == Seq.index (as_seq h b) (U32.v i)))
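(*
 * Illustrative sketch (hypothetical client, not part of this interface):
 * reading two cells of a live buffer with ``index``. Here `trivial_preorder`
 * is assumed to be the trivial sequence preorder `fun _ _ -> True`, as in
 * LowStar.Buffer.
 *
 *   let sum_first_two (b:mbuffer int (trivial_preorder int) (trivial_preorder int))
 *     : HST.Stack int
 *       (requires fun h -> live h b /\ length b >= 2)
 *       (ensures fun h r h' -> h == h' /\ r == get h b 0 + get h b 1)
 *   = let x = index b 0ul in
 *     let y = index b 1ul in
 *     x + y
 *)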
/// The following stateful operations on buffers modify the memory,
/// and, as usual, require the liveness of the buffer.
/// ``g_upd_seq b s h`` updates the entire buffer ``b``'s contents in
/// heap ``h`` to correspond to the sequence ``s``.
val g_upd_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (s:Seq.lseq a (length b))
(h:HS.mem{live h b})
:GTot HS.mem
val lemma_g_upd_with_same_seq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b)) (ensures (g_upd_seq b (as_seq h b) h == h))
/// A lemma specifying `g_upd_seq` in terms of its effect on the
/// buffer's underlying sequence
val g_upd_seq_as_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(s:Seq.lseq a (length b))
(h:HS.mem{live h b})
: Lemma (let h' = g_upd_seq b s h in
(Seq.length s > 0 ==> not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
HST.equal_domains h h' /\
as_seq h' b == s)
/// ``g_upd b i v h`` updates the buffer ``b`` in heap ``h`` at location
/// ``i``, writing ``v`` there. This is the spec analog of the stateful
/// update ``upd`` below.
let g_upd (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: GTot HS.mem
= g_upd_seq b (Seq.upd (as_seq h b) i v) h
val g_upd_modifies_strong (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: Lemma (modifies (loc_buffer_from_to b (U32.uint_to_t i) (U32.uint_to_t (i + 1))) h (g_upd b i v h))
/// ``upd b i v`` writes ``v`` to the memory, at offset ``i`` of
/// buffer ``b``. KaRaMeL compiles it as ``b[i] = v``.
val upd'
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
:HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> h' == g_upd b (U32.v i) v h))
inline_for_extraction
let upd
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
: HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> (not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
as_seq h' b == Seq.upd (as_seq h b) (U32.v i) v))
= let h = HST.get () in
upd' b i v;
g_upd_seq_as_seq b (Seq.upd (as_seq h b) (U32.v i) v) h
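(*
 * Illustrative sketch (hypothetical client): incrementing the single cell of a
 * pointer with ``index`` and ``upd``. With the trivial preorder (assumed below,
 * as in LowStar.Buffer), the ``rel`` side condition of ``upd`` holds trivially.
 *
 *   let incr (p:mpointer int (trivial_preorder int) (trivial_preorder int))
 *     : HST.Stack unit
 *       (requires fun h -> live h p)
 *       (ensures fun h _ h' ->
 *          modifies (loc_buffer p) h h' /\ deref h' p == deref h p + 1)
 *   = let v = index p 0ul in
 *     upd p 0ul (v + 1)
 *)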
/// ``recallable b`` holds for buffers whose liveness can be re-established at
/// any time with ``recall`` below; in particular, buffers returned by
/// ``mgcmalloc`` are recallable, since they can never be deallocated.
val recallable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val region_lifetime_buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Type0
(*
* A functoriality lemma
*)
unfold
let rrel_rel_always_compatible (#a:Type0) (rrel rel:srel a) =
forall (len:nat) (i:nat) (j:nat{i <= j /\ j <= len}). compatible_subseq_preorder len rrel i j rel
val region_lifetime_sub (#a:Type0) (#rrel #rel #subrel:srel a)
(b0:mbuffer a rrel rel)
(b1:mbuffer a rrel subrel)
: Lemma
(requires rrel_rel_always_compatible rrel subrel)
(ensures
(region_lifetime_buf b0 /\
(exists i len. U32.v i + U32.v len <= length b0 /\ b1 == mgsub subrel b0 i len)) ==> region_lifetime_buf b1)
val recallable_null (#a:Type0) (#rrel #rel:srel a)
:Lemma (recallable (mnull #a #rrel #rel)) [SMTPat (recallable (mnull #a #rrel #rel))]
(*
val recallable_includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (recallable larger <==> recallable smaller))
[SMTPatOr [
[SMTPat (recallable larger); SMTPat (recallable smaller);];
[SMTPat (larger `includes` smaller)];
]]
*)
val recallable_mgsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel /\ recallable b))
(ensures (recallable (mgsub sub_rel b i len)))
[SMTPatOr [
[SMTPat (recallable (mgsub sub_rel b i len))];
[SMTPat (recallable b); SMTPat (mgsub sub_rel b i len);]
]]
val recall (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack unit (requires (fun m -> recallable b \/ (region_lifetime_buf b /\ HS.live_region m (frameOf b))))
(ensures (fun m0 _ m1 -> m0 == m1 /\ live m1 b))
(*
* Begin: API for general witness and recall
* Clients can witness predicates on the contents of the buffer, and later recall them
* Provided the predicates are stable w.r.t. the buffer preorder
*)
(* Shorthand for predicates of Seq.seq a *)
unfold let spred (a:Type0) = Seq.seq a -> Type0
(*
 * Note the tight patterns on the quantifier; you may need to write additional
 * triggers if you are working with them directly
 *)
unfold let stable_on (#a:Type0) (p:spred a) (rel:srel a) =
forall (s1 s2:Seq.seq a).{:pattern (p s1); (rel s1 s2); (p s2)} (p s1 /\ rel s1 s2) ==> p s2
(* Clients get this pure token when they witness a predicate *)
val witnessed (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a) :Type0
(*
 * We can only support witness and recall for gc-malloced buffers (i.e. recallable ones).
 * This is not a fundamental limitation, but lifting it needs some tweaks to the
 * underlying state model.
 *)
val witness_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.ST unit (requires (fun h0 -> p (as_seq h0 b) /\ p `stable_on` rel))
(ensures (fun h0 _ h1 -> h0 == h1 /\ b `witnessed` p))
val recall_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.ST unit (requires (fun h0 -> (recallable b \/ live h0 b) /\ b `witnessed` p))
(ensures (fun h0 _ h1 -> h0 == h1 /\ live h0 b /\ p (as_seq h0 b)))
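(*
 * Illustrative sketch (hypothetical client): with a preorder that forces the
 * contents to remain equal, every predicate is stable, so the current contents
 * can be witnessed and recalled later.
 *
 *   let seq_eq (#a:Type0) : srel a = fun s1 s2 -> s1 == s2
 *
 *   let remember (#rrel:srel int) (b:mbuffer int rrel (seq_eq #int)) (s:Seq.seq int)
 *     : HST.ST unit
 *       (requires fun h -> as_seq h b == s)
 *       (ensures fun h _ h' -> h == h' /\ b `witnessed` (fun s' -> s' == s))
 *   = witness_p b (fun s' -> s' == s)
 *)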
val witnessed_functorial (#a:Type0)
(#rrel #rel1 #rel2:srel a)
(b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t)
(s1 s2:spred a)
: Lemma
(requires
rrel_rel_always_compatible rrel rel1 /\ //e.g. trivial_preorder, immutable preorder etc.
U32.v i + U32.v len <= length b1 /\
b2 == mgsub rel2 b1 i len /\ //the underlying allocation unit for b1 and b2 must be the same
witnessed b1 s1 /\
(forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2)))
(ensures witnessed b2 s2)
(*
* A stateful version that relaxes the rrel and rel compatibility
* but requires liveness of b1
*)
val witnessed_functorial_st (#a:Type0)
(#rrel #rel1 #rel2:srel a)
(b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t)
(s1 s2:spred a)
: HST.Stack unit
(requires fun h ->
live h b1 /\
U32.v i + U32.v len <= length b1 /\
b2 == mgsub rel2 b1 i len /\
witnessed b1 s1 /\
(forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2)))
(ensures fun h0 _ h1 -> h0 == h1 /\ witnessed b2 s2)
(* End: API for general witness and recall *)
/// Deallocation. A buffer that was allocated by ``malloc`` (see below)
/// can be deallocated with ``free``.
val freeable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val free (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.ST unit (requires (fun h0 -> live h0 b /\ freeable b))
(ensures (fun h0 _ h1 -> (not (g_is_null b)) /\
Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\
(HS.get_tip h1) == (HS.get_tip h0) /\
modifies (loc_addr_of_buffer b) h0 h1 /\
HS.live_region h1 (frameOf b)))
val freeable_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (freeable b)) (ensures (length b > 0))
[SMTPat (freeable b)]
val freeable_disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (frameOf b1 <> frameOf b2 \/ as_addr b1 <> as_addr b2))
let freeable_disjoint' (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (loc_disjoint (loc_addr_of_buffer b1) (loc_addr_of_buffer b2)))
[SMTPat (freeable b1); SMTPat (disjoint b1 b2)]
= freeable_disjoint b1 b2
(***** Begin allocation functions *****)
/// Allocation. This is the common postcondition of all allocation
/// operators, which tells that the resulting buffer is fresh, and
/// specifies its initial contents.
(*
* Allocation functions:
* In the return type, we try to give heap-independent postconditions (such as length)
* in the refinement of the buffer type (for the usage pattern of top-level buffers)
* while heap dependent postconditions are provided in the ensures clause
*
* One unsatisfying aspect is that these functions are duplicated in the wrappers that we write
* (e.g. Buffer, ImmutableBuffer, etc.)
* If we don't duplicate, then the clients may face type inference issues (for preorders)
*
* So, if you change any of the pre- or postconditions, you should change the pre and post spec functions
* (such as alloc_post_mem_common etc.), rather than the specs directly
* Perhaps we can rely on F* type inference and not write specs explicitly in those wrappers?
* Will try that
*
* For memory dependent post, alloc_post_mem_common is the one used by everyone
*
* For heap allocations, the library also provides partial functions that could return null
* Clients need to explicitly check for non-null values when using these functions
* Partial function specs use alloc_partial_post_mem_common
*
* NOTE: a useful test for the implementation of partial functions is that
* their spec should be valid even when their implementation just returns null
*)
unfold let lmbuffer (a:Type0) (rrel rel:srel a) (len:nat)
= b:mbuffer a rrel rel{length b == len /\ not (g_is_null b)}
unfold
let alloc_post_mem_common (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h0 h1:HS.mem) (s:Seq.seq a)
= live h1 b /\
unused_in b h0 /\
Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\
(HS.get_tip h1) == (HS.get_tip h0) /\
modifies loc_none h0 h1 /\
as_seq h1 b == s
(* Return type and post for partial allocation functions *)
unfold let lmbuffer_or_null (a:Type0) (rrel rel:srel a) (len:nat) (r:HS.rid)
= b:mbuffer a rrel rel{(not (g_is_null b)) ==> (length b == len /\ frameOf b == r)}
unfold let alloc_partial_post_mem_common (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h0 h1:HS.mem) (s:Seq.seq a)
= (g_is_null b /\ h0 == h1) \/
((not (g_is_null b)) /\ alloc_post_mem_common b h0 h1 s)
unfold let malloc_pre (r:HS.rid) (len:U32.t) = HST.is_eternal_region r /\ U32.v len > 0
/// ``gcmalloc r init len`` allocates a memory-managed buffer of some
/// positive length ``len`` in an eternal region ``r``. Every cell of this
/// buffer will have initial contents ``init``. Such a buffer cannot be
/// freed. In fact, it is eternal: it cannot be deallocated at all.
(*
* See the Allocation comment above when changing the spec
*)
val mgcmalloc (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ recallable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
(*
* Allocate a memory-managed buffer initialized with contents from src
*
* This allocates and initializes the buffer atomically (from the perspective of the Low* clients)
*)
val mgcmalloc_and_blit (#a:Type0) (#rrel:srel a) (r:HS.rid)
(#rrel1 #rel1:srel a) (src:mbuffer a rrel1 rel1) (id_src:U32.t) (len:U32.t)
: HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ recallable b})
(requires fun h0 ->
malloc_pre r len /\
live h0 src /\ U32.v id_src + U32.v len <= length src)
(ensures fun h0 b h1 ->
alloc_post_mem_common b h0 h1
(Seq.slice (as_seq h0 src) (U32.v id_src) (U32.v id_src + U32.v len)))
(*
* See the Allocation comment above when changing the spec
*)
inline_for_extraction
let mgcmalloc_partial (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer_or_null a rrel rrel (U32.v len) r{recallable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
= mgcmalloc r init len
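(*
 * Illustrative sketch (hypothetical client): the partial allocator may return
 * the null buffer, so callers must test the result with ``is_null`` before
 * using it. `trivial_preorder` is assumed to be the trivial sequence preorder,
 * as in LowStar.Buffer.
 *
 *   let alloc_or_null (r:HS.rid)
 *     : HST.ST (lmbuffer_or_null int (trivial_preorder int) (trivial_preorder int) 8 r)
 *       (requires fun _ -> malloc_pre r 8ul)
 *       (ensures fun _ _ _ -> True)
 *   = let b = mgcmalloc_partial #int #(trivial_preorder int) r 0 8ul in
 *     if is_null b then b
 *     else (upd b 0ul 42; b)
 *)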
/// ``malloc r init len`` allocates a hand-managed buffer of some
/// positive length ``len`` in an eternal region ``r``. Every cell of this
/// buffer will have initial contents ``init``. Such a buffer can be
/// freed using ``free`` above. Note that the ``freeable`` permission is
/// only on the whole buffer ``b``, and is not inherited by any of its
/// strict sub-buffers.
(*
* See the Allocation comment above when changing the spec
*)
val mmalloc (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
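(*
 * Illustrative sketch (hypothetical client): a buffer obtained from ``mmalloc``
 * carries the ``freeable`` permission and can later be released with ``free``.
 * `trivial_preorder` is assumed to be the trivial sequence preorder, as in
 * LowStar.Buffer.
 *
 *   let alloc_and_free (r:HS.rid)
 *     : HST.ST unit
 *       (requires fun _ -> malloc_pre r 1ul)
 *       (ensures fun _ _ _ -> True)
 *   = let b = mmalloc #int #(trivial_preorder int) r 0 1ul in
 *     free b
 *)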
(*
* Allocate a hand-managed buffer initialized with contents from src
*
* This allocates and initializes the buffer atomically (from the perspective of the Low* clients)
*)
val mmalloc_and_blit (#a:Type0) (#rrel:srel a) (r:HS.rid)
(#rrel1 #rel1:srel a) (src:mbuffer a rrel1 rel1) (id_src:U32.t) (len:U32.t)
: HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ freeable b})
(requires fun h0 ->
malloc_pre r len /\
live h0 src /\ U32.v id_src + U32.v len <= length src)
(ensures fun h0 b h1 ->
alloc_post_mem_common b h0 h1
(Seq.slice (as_seq h0 src) (U32.v id_src) (U32.v id_src + U32.v len)))
(*
* See the Allocation comment above when changing the spec
*)
inline_for_extraction
let mmalloc_partial (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer_or_null a rrel rrel (U32.v len) r{(not (g_is_null b)) ==> freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
= mmalloc r init len
/// ``alloca init len`` allocates a buffer of some positive length ``len``
/// in the current stack frame. Every cell of this buffer will have
/// initial contents ``init``. Such a buffer cannot be freed
/// individually, but is automatically freed as soon as its stack
/// frame is deallocated by ``HST.pop_frame``.
unfold let alloca_pre (len:U32.t) = U32.v len > 0
(*
* See the Allocation comment above when changing the spec
*)
val malloca (#a:Type0) (#rrel:srel a)
(init:a) (len:U32.t)
:HST.StackInline (lmbuffer a rrel rrel (U32.v len))
(requires (fun _ -> alloca_pre len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init) /\
frameOf b == HS.get_tip h0))
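(*
 * Illustrative sketch (hypothetical client): a stack-allocated buffer lives
 * until the enclosing frame is popped, so the usual pattern brackets its use
 * with ``HST.push_frame`` and ``HST.pop_frame``. `trivial_preorder` is assumed
 * to be the trivial sequence preorder, as in LowStar.Buffer.
 *
 *   let with_temp () : HST.Stack int (requires fun _ -> True) (ensures fun _ _ _ -> True)
 *   = HST.push_frame ();
 *     let tmp = malloca #int #(trivial_preorder int) 0 4ul in
 *     upd tmp 0ul 42;
 *     let r = index tmp 0ul in
 *     HST.pop_frame ();
 *     r
 *)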
(*
* Allocate a stack buffer initialized with contents from src
*
* This allocates and initializes the buffer atomically (from the perspective of the Low* clients)
*)
val malloca_and_blit (#a:Type0) (#rrel:srel a)
(#rrel1 #rel1:srel a) (src:mbuffer a rrel1 rel1) (id_src:U32.t) (len:U32.t)
: HST.StackInline (lmbuffer a rrel rrel (U32.v len))
(requires fun h0 ->
alloca_pre len /\
live h0 src /\ U32.v id_src + U32.v len <= length src)
(ensures fun h0 b h1 ->
alloc_post_mem_common b h0 h1
(Seq.slice (as_seq h0 src) (U32.v id_src) (U32.v id_src + U32.v len)) /\
frameOf b == HS.get_tip h0)
/// ``alloca_of_list init`` allocates a buffer in the current stack
/// frame. The initial values of the cells of this buffer are
/// specified by the ``init`` list, which must be nonempty, and of
/// length representable as a machine integer. | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | init: Prims.list a -> Prims.logical | Prims.Tot | [
"total"
] | [] | [
"Prims.list",
"Prims.l_and",
"FStar.Pervasives.normalize",
"Prims.b2t",
"Prims.op_LessThan",
"FStar.List.Tot.Base.length",
"Prims.op_LessThanOrEqual",
"FStar.UInt.max_int",
"Prims.logical"
] | [] | false | false | false | true | true | let alloca_of_list_pre (#a: Type0) (init: list a) =
| normalize (0 < FStar.List.Tot.length init) /\
normalize (FStar.List.Tot.length init <= UInt.max_int 32) | false |
|
LowStar.Monotonic.Buffer.fsti | LowStar.Monotonic.Buffer.spred | val spred : a: Type0 -> Type | let spred (a:Type0) = Seq.seq a -> Type0 | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 47,
"end_line": 1968,
"start_col": 7,
"start_line": 1968
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers,
/// but we need to ensure that changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
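(*
 * Illustrative sketch (hypothetical lemma): with the trivial preorder
 * `fun _ _ -> True` on both the buffer and the sub-buffer (as in
 * LowStar.Buffer), compatibility always holds, so sub-buffers can be taken
 * freely.
 *
 *   let triv (#a:Type0) : srel a = fun _ _ -> True
 *
 *   let trivial_compatible (#a:Type0) (b:mbuffer a (triv #a) (triv #a))
 *     (i len:U32.t{U32.v i + U32.v len <= length b})
 *     : Lemma (compatible_sub b i len (triv #a))
 *   = ()
 *)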
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible.
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
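(*
 * Illustrative sketch (hypothetical lemma): nested ``mgsub``s collapse, so a
 * sub-buffer of a sub-buffer can be re-expressed against the root buffer
 * (the proof may need extra hints in practice).
 *
 *   let nested_sub (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
 *     : Lemma (requires length b >= 8)
 *             (ensures mgsub rel (mgsub rel b 2ul 6ul) 1ul 3ul == mgsub rel b 3ul 3ul)
 *   = ()
 *)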
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers with the same region and address have
/// elements of the same type (and the same initial preorder).
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then so are their
/// corresponding sets of memory locations.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then so are their corresponding sets of memory
/// locations.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then so are their corresponding memory locations
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
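(* A small illustration (hypothetical, not part of this interface): the
   liveness of two buffers ``b1`` and ``b2`` in a memory ``h`` can be stated
   as ``all_live h [buf b1; buf b2]``, which reduces at typechecking time to
   the conjunction of ``live h b1`` and ``live h b2``. *)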
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
 * Same as all_disjoint, retained for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
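(* A usage sketch (hypothetical client code, not part of this interface):
   stating that three buffers occupy pairwise-disjoint footprints.
let three_way_disjoint (#a:Type0) (#rrel #rel:srel a)
  (b1 b2 b3:mbuffer a rrel rel) : Type0
  = loc_pairwise_disjoint [loc_buffer b1; loc_buffer b2; loc_buffer b3]
*)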
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
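(* A framing sketch (hypothetical client reasoning, not part of this
   interface): if only ``loc_buffer b1`` is modified between ``h0`` and
   ``h1``, a live buffer ``b2`` that is disjoint from it keeps both its
   liveness and its contents.
let frame_over_disjoint_write (#a:Type0) (#rrel #rel:srel a)
  (b1 b2:mbuffer a rrel rel) (h0 h1:HS.mem)
  : Lemma
    (requires (live h0 b2 /\ loc_disjoint (loc_buffer b2) (loc_buffer b1) /\
               modifies (loc_buffer b1) h0 h1))
    (ensures  (live h1 b2 /\ as_seq h0 b2 == as_seq h1 b2))
  = modifies_buffer_elim b2 (loc_buffer b1) h0 h1
*)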
/// If the memory state does not change, then ``modifies s h h`` holds for
/// any set ``s`` of memory locations (and, in particular, for the empty
/// set ``loc_none``).
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x
/// Modifies clauses are transitive. This lemma is the most general
/// one.
val modifies_trans
(s12: loc)
(h1 h2: HS.mem)
(s23: loc)
(h3: HS.mem)
: Lemma
(requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
(ensures (modifies (loc_union s12 s23) h1 h3))
let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
= modifies_trans l h1 h2 l_goal h3
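(* A usage sketch (hypothetical): two successive steps, each of which only
   touches ``loc_buffer b``, compose into a single modifies clause over the
   same footprint, since ``loc_includes`` is reflexive.
let compose_two_writes (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel) (h1 h2 h3:HS.mem)
  : Lemma
    (requires (modifies (loc_buffer b) h1 h2 /\ modifies (loc_buffer b) h2 h3))
    (ensures  (modifies (loc_buffer b) h1 h3))
  = modifies_trans_linear (loc_buffer b) (loc_buffer b) h1 h2 h3
*)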
/// Regions that are not live can be removed from sets of memory
/// locations that are modified.
val modifies_only_live_regions
(rs: Set.set HS.rid)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_regions false rs) l) h h' /\
(forall r . Set.mem r rs ==> (~ (HS.live_region h r)))
))
(ensures (modifies l h h'))
/// As a consequence, fresh regions can be removed from modifies
/// clauses.
val no_upd_fresh_region: r:HS.rid -> l:loc -> h0:HS.mem -> h1:HS.mem -> Lemma
(requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1))
(ensures (modifies l h0 h1))
[SMTPat (HS.fresh_region r h0 h1); SMTPat (modifies l h0 h1)]
val new_region_modifies (m0: HS.mem) (r0: HS.rid) (col: option int) : Lemma
(requires (HST.is_eternal_region r0 /\ HS.live_region m0 r0 /\ (None? col \/ HS.is_heap_color (Some?.v col))))
(ensures (
let (_, m1) = HS.new_eternal_region m0 r0 col in
modifies loc_none m0 m1
))
[SMTPat (HS.new_eternal_region m0 r0 col)]
/// Stack discipline: any stack frame (and all its transitively
/// extending regions) that is pushed, modified and popped can be
/// removed from a modifies clause.
/// AR: 01/29/2019: Removing the smt pattern from this lemma.
/// Clients are no longer expected to call it explicitly;
/// if you find yourself having to, please raise an issue.
val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
))
/// Compatibility lemmas to rescue modifies clauses specified in the
/// standard F* HyperStack library.
val modifies_loc_regions_intro
(rs: Set.set HS.rid)
(h1 h2: HS.mem)
: Lemma
(requires (HS.modifies rs h1 h2))
(ensures (modifies (loc_regions true rs) h1 h2))
val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2))
/// Modifies clauses for allocating a reference: nothing is
/// modified. (In particular, a modifies clause does not track
/// memory locations that are created.)
val modifies_ralloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(i: HS.rid)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel)
(h' : HS.mem)
: Lemma
(requires (HST.ralloc_post i init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.ralloc_post i init h x h')]
val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')]
/// Modifies clause for freeing a reference: the address is modified.
val modifies_free
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel { HS.is_mm r } )
(m: HS.mem { m `HS.contains` r } )
: Lemma
(modifies (loc_freed_mreference r) m (HS.free r m))
[SMTPat (HS.free r m)]
/// Another compatibility lemma
val modifies_none_modifies
(h1 h2: HS.mem)
: Lemma
(requires (HST.modifies_none h1 h2))
(ensures (modifies loc_none h1 h2))
[SMTPat (HST.modifies_none h1 h2)]
/// Compatibility with HS.upd
val modifies_upd
(#t: Type) (#pre: Preorder.preorder t)
(r: HS.mreference t pre)
(v: t)
(h: HS.mem)
: Lemma
(requires (HS.contains h r))
(ensures (modifies (loc_mreference r) h (HS.upd h r v)))
[SMTPat (HS.upd h r v)]
/// Introduction lemma for modifying loc_buffer_from_to
val modifies_loc_buffer_from_to_intro
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
/// A memory ``h`` does not contain address ``a`` in region ``r``, denoted
/// ``does_not_contain_addr h (r, a)``, only if either region ``r`` is
/// not live, or address ``a`` is unused in region ``r``.
(* BEGIN TODO: move to FStar.Monotonic.HyperStack *)
val does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: GTot Type0
val not_live_region_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (~ (HS.live_region h (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val unused_in_does_not_contain_addr
(h: HS.mem)
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r)))
val addr_unused_in_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val free_does_not_contain_addr
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
HS.is_mm r /\
m `HS.contains` r /\
fst x == HS.frameOf r /\
snd x == HS.as_addr r
))
(ensures (
HS.free r m `does_not_contain_addr` x
))
[SMTPat (HS.free r m `does_not_contain_addr` x)]
val does_not_contain_addr_elim
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
m `does_not_contain_addr` x /\
HS.frameOf r == fst x /\
HS.as_addr r == snd x
))
(ensures (~ (m `HS.contains` r)))
(** END TODO *)
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_live_addresses
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_addresses false r a) l) h h' /\
(forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x))
))
(ensures (modifies l h h'))
(* A generic way to ensure that a freshly allocated buffer is disjoint from
   any other object, no matter how the latter's liveness is defined. *)
val loc_not_unused_in (h: HS.mem) : GTot loc
val loc_unused_in (h: HS.mem) : GTot loc
(* Shortcut notations with more handy names *)
let loc_in (l: loc) (h: HS.mem) =
loc_not_unused_in h `loc_includes` l
let loc_not_in (l: loc) (h: HS.mem) =
loc_unused_in h `loc_includes` l
val loc_regions_unused_in (h: HS.mem) (rs: Set.set HS.rid) : Lemma
(requires (forall r . Set.mem r rs ==> (~ (HS.live_region h r))))
(ensures (loc_unused_in h `loc_includes` loc_regions false rs))
val loc_unused_in_not_unused_in_disjoint (h: HS.mem) : Lemma
(loc_disjoint (loc_unused_in h) (loc_not_unused_in h))
val not_live_region_loc_not_unused_in_disjoint
(h0: HS.mem)
(r: HS.rid)
: Lemma
(requires (~ (HS.live_region h0 r)))
(ensures (loc_disjoint (loc_region_only false r) (loc_not_unused_in h0)))
let fresh_frame_loc_not_unused_in_disjoint
(h0 h1: HS.mem)
: Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)]
= not_live_region_loc_not_unused_in_disjoint h0 (HS.get_tip h1)
val live_loc_not_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b))
(ensures (loc_not_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (live h b)]
val unused_in_loc_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (unused_in b h))
(ensures (loc_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (unused_in b h)]
val modifies_address_liveness_insensitive_unused_in
(h h' : HS.mem)
: Lemma
(requires (modifies (address_liveness_insensitive_locs) h h'))
(ensures (loc_not_unused_in h' `loc_includes` loc_not_unused_in h /\ loc_unused_in h `loc_includes` loc_unused_in h'))
/// Locations that are still unused in the initial memory can likewise be
/// removed from modifies clauses.
val modifies_only_not_unused_in
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (modifies (loc_union (loc_unused_in h) l) h h'))
(ensures (modifies l h h'))
val mreference_live_loc_not_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (h `HS.contains` r))
(ensures (loc_not_unused_in h `loc_includes` loc_freed_mreference r /\ loc_not_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.contains h r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_freed_mreference r)];
]]
val mreference_unused_in_loc_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (loc_unused_in h `loc_includes` loc_freed_mreference r /\ loc_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.unused_in r h)];
[SMTPat (loc_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_unused_in h `loc_includes` loc_freed_mreference r)];
]]
let unused_in_not_unused_in_disjoint_2
(l1 l2 l1' l2': loc)
(h: HS.mem)
: Lemma
(requires (loc_unused_in h `loc_includes` l1 /\ loc_not_unused_in h `loc_includes` l2 /\ l1 `loc_includes` l1' /\ l2 `loc_includes` l2' ))
(ensures (loc_disjoint l1' l2' ))
[SMTPat (loc_disjoint l1' l2'); SMTPat (loc_unused_in h `loc_includes` l1); SMTPat (loc_not_unused_in h `loc_includes` l2)]
= loc_includes_trans (loc_unused_in h) l1 l1' ;
loc_includes_trans (loc_not_unused_in h) l2 l2' ;
loc_unused_in_not_unused_in_disjoint h ;
loc_disjoint_includes (loc_unused_in h) (loc_not_unused_in h) l1' l2'
val modifies_loc_unused_in
(l: loc)
(h1 h2: HS.mem)
(l' : loc)
: Lemma
(requires (
modifies l h1 h2 /\
address_liveness_insensitive_locs `loc_includes` l /\
loc_unused_in h2 `loc_includes` l'
))
(ensures (loc_unused_in h1 `loc_includes` l'))
[SMTPatOr [
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h2 `loc_includes` l')];
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h1 `loc_includes` l')];
]]
/// Shorthand: freshness
let fresh_loc (l: loc) (h h' : HS.mem) : GTot Type0 =
loc_unused_in h `loc_includes` l /\
loc_not_unused_in h' `loc_includes` l
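(* A typical use of this shorthand (sketch): an allocation routine can state
   ``fresh_loc (loc_buffer b) h0 h1`` in its postcondition, together with
   ``modifies loc_none h0 h1``, to record that the returned buffer ``b``
   lives at a location that was unused before the call and allocated after
   it; ``unused_in_not_unused_in_disjoint_2`` above then yields disjointness
   from anything already allocated in ``h0``. *)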
let ralloc_post_fresh_loc (#a:Type) (#rel:Preorder.preorder a) (i: HS.rid) (init:a) (m0: HS.mem)
(x: HST.mreference a rel{HST.is_eternal_region (HS.frameOf x)}) (m1: HS.mem) : Lemma
(requires (HST.ralloc_post i init m0 x m1))
(ensures (fresh_loc (loc_freed_mreference x) m0 m1))
[SMTPat (HST.ralloc_post i init m0 x m1)]
= ()
//AR: this is needed for liveness across fresh_frame
val fresh_frame_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (modifies loc_none h0 h1))
[SMTPat (HS.fresh_frame h0 h1)]
val popped_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.popped h0 h1))
(ensures (modifies (loc_region_only false (HS.get_tip h0)) h0 h1))
[SMTPat (HS.popped h0 h1)]
val modifies_remove_new_locs (l_fresh l_aux l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (fresh_loc l_fresh h1 h2 /\
modifies l_aux h1 h2 /\
l_goal `loc_includes` l_aux /\
modifies (loc_union l_fresh l_goal) h2 h3))
(ensures (modifies l_goal h1 h3))
[SMTPat (fresh_loc l_fresh h1 h2);
SMTPat (modifies l_aux h1 h2);
SMTPat (modifies l_goal h1 h3)]
(*
* AR: this lemma is framing the modifies clause across a fresh frame
* one way to do it would have been to reuse the lemma modifies_remove_new_locs,
* treating the fresh frame as another new location
 * however, the way the library is set up, loc_region in any form cannot be considered
* a fresh loc
* so, we have a special lemma for fresh_frame
*)
let modifies_remove_fresh_frame (h1 h2 h3:HS.mem) (l:loc)
: Lemma (requires (HS.fresh_frame h1 h2 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h2)) l) h2 h3))
(ensures (modifies l h1 h3))
[SMTPat (modifies l h1 h3); SMTPat (HS.fresh_frame h1 h2)]
= loc_regions_unused_in h1 (HS.mod_set (Set.singleton (HS.get_tip h2)));
modifies_only_not_unused_in l h1 h3
/// Legacy shorthands for disjointness and inclusion of buffers
///
let disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_disjoint (loc_buffer b1) (loc_buffer b2)
let includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_includes (loc_buffer b1) (loc_buffer b2) /\
(g_is_null b1 <==> g_is_null b2)
val disjoint_neq (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (disjoint b1 b2 /\ U32.v (len b1) > 0))
(ensures (~(b1 === b2)))
val empty_disjoint (#t1 #t2: Type) (#rrel1 #rel1: srel t1) (#rrel2 #rel2: srel t2) (b1: mbuffer t1 rrel1 rel1) (b2: mbuffer t2 rrel2 rel2) : Lemma
(requires (length b1 == 0))
(ensures (disjoint b1 b2))
(*
/// The liveness of a sub-buffer follows from the liveness
/// of its enclosing buffer.
val includes_live (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (live h larger ==> live h smaller))
[SMTPatOr [
[SMTPat (includes larger smaller); SMTPat (live h larger)];
[SMTPat (includes larger smaller); SMTPat (live h smaller)];
]]
*)
val includes_frameOf_as_addr (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (g_is_null larger == g_is_null smaller /\ frameOf larger == frameOf smaller /\ as_addr larger == as_addr smaller))
[SMTPat (larger `includes` smaller)]
///
/// Useful shorthands for pointers, or maybe-null pointers
inline_for_extraction
type mpointer (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{length b == 1}
inline_for_extraction
type mpointer_or_null (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{if g_is_null b then True else length b == 1}
unfold
let deref (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (x:mpointer a rrel rel) =
get h x 0
/// Two pointers having different contents are disjoint. This is valid
/// only for pointers, i.e. buffers of size 1.
val pointer_distinct_sel_disjoint
(#a:Type0) (#rrel1 #rrel2 #rel1 #rel2:srel a)
(b1:mpointer a rrel1 rel1)
(b2:mpointer a rrel2 rel2)
(h:HS.mem)
:Lemma (requires (live h b1 /\ live h b2 /\ get h b1 0 =!= get h b2 0))
(ensures (disjoint b1 b2))
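(* A usage sketch (hypothetical): two live pointers observed to hold
   different values in the same memory must be disjoint.
let distinct_values_disjoint (#a:Type0) (#rrel1 #rrel2 #rel1 #rel2:srel a)
  (b1:mpointer a rrel1 rel1) (b2:mpointer a rrel2 rel2) (h:HS.mem)
  : Lemma
    (requires (live h b1 /\ live h b2 /\ deref h b1 =!= deref h b2))
    (ensures  (disjoint b1 b2))
  = pointer_distinct_sel_disjoint b1 b2 h
*)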
/// The following stateful operations on buffers do not change the
/// memory, but, as required by the C standard, they all require the
/// buffer in question to be live.
/// The nullity test, ``is_null b``, which KaRaMeL compiles to C as ``b == NULL``.
val is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack bool (requires (fun h -> live h b))
(ensures (fun h y h' -> h == h' /\ y == g_is_null b))
/// ``sub b i len`` constructs the sub-buffer of ``b`` starting from
/// offset ``i`` with length ``len``. KaRaMeL extracts this operation as
/// ``b + i`` (or, equivalently, ``&b[i]``.)
val msub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t) (len:Ghost.erased U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i + U32.v (Ghost.reveal len) <= length b /\ compatible_sub b i (Ghost.reveal len) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (Ghost.reveal len)))
/// ``offset b i`` constructs the tail of the buffer ``b`` starting from
/// offset ``i``, i.e. the sub-buffer of ``b`` starting from offset ``i``
/// with length ``U32.sub (len b) i``. KaRaMeL compiles it as ``b + i`` or
/// ``&b[i]``.
///
/// This stateful operation cannot be derived from ``sub``, because the
/// length cannot be computed outside of proofs.
val moffset (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i <= length b /\ compatible_sub b i (U32.sub (len b) i) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (U32.sub (len b) i)))
// goffset
/// ``index b i`` reads the value of ``b`` at offset ``i`` from memory and
/// returns it. KaRaMeL compiles it as ``b[i]``.
val index (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (i:U32.t)
:HST.Stack a (requires (fun h -> live h b /\ U32.v i < length b))
(ensures (fun h y h' -> h == h' /\ y == Seq.index (as_seq h b) (U32.v i)))
/// The following stateful operations on buffers modify the memory,
/// and, as usual, require the liveness of the buffer.
/// ``g_upd_seq b s h`` updates the entire buffer `b`'s contents in
/// heap `h` to correspond to the sequence `s`
val g_upd_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (s:Seq.lseq a (length b))
(h:HS.mem{live h b})
:GTot HS.mem
val lemma_g_upd_with_same_seq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b)) (ensures (g_upd_seq b (as_seq h b) h == h))
/// A lemma specifying `g_upd_seq` in terms of its effect on the
/// buffer's underlying sequence
val g_upd_seq_as_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(s:Seq.lseq a (length b))
(h:HS.mem{live h b})
: Lemma (let h' = g_upd_seq b s h in
(Seq.length s > 0 ==> not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
HST.equal_domains h h' /\
as_seq h' b == s)
/// ``g_upd b i v h`` updates the buffer `b` in heap `h` at location
/// `i` writing ``v`` there. This is the spec analog of the stateful
/// update `upd` below.
let g_upd (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: GTot HS.mem
= g_upd_seq b (Seq.upd (as_seq h b) i v) h
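(* A direct consequence (sketch): the contents of ``g_upd b i v h`` are the
   pointwise update of the original contents, by ``g_upd_seq_as_seq``.
let g_upd_as_seq (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel) (i:nat{i < length b}) (v:a) (h:HS.mem{live h b})
  : Lemma (as_seq (g_upd b i v h) b == Seq.upd (as_seq h b) i v)
  = g_upd_seq_as_seq b (Seq.upd (as_seq h b) i v) h
*)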
val g_upd_modifies_strong (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: Lemma (modifies (loc_buffer_from_to b (U32.uint_to_t i) (U32.uint_to_t (i + 1))) h (g_upd b i v h))
/// ``upd b i v`` writes ``v`` to the memory, at offset ``i`` of
/// buffer ``b``. KaRaMeL compiles it as ``b[i] = v``.
val upd'
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
:HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> h' == g_upd b (U32.v i) v h))
inline_for_extraction
let upd
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
: HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> (not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
as_seq h' b == Seq.upd (as_seq h b) (U32.v i) v))
= let h = HST.get () in
upd' b i v;
g_upd_seq_as_seq b (Seq.upd (as_seq h b) (U32.v i) v) h
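(* A small usage sketch (hypothetical client code, not part of this
   interface): incrementing the first cell of a buffer of machine integers,
   assuming the buffer is non-empty and its preorder ``rel`` accepts the
   update (e.g. the trivial preorder).
let incr_first (#rrel #rel:srel U32.t) (b:mbuffer U32.t rrel rel)
  : HST.Stack unit
    (requires (fun h ->
      live h b /\ 0 < length b /\
      rel (as_seq h b) (Seq.upd (as_seq h b) 0 (U32.add_mod (get h b 0) 1ul))))
    (ensures (fun h0 _ h1 -> modifies (loc_buffer b) h0 h1 /\ live h1 b))
  = let v = index b 0ul in
    upd b 0ul (U32.add_mod v 1ul)
*)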
(* FIXME: Comment on `recall` *)
val recallable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val region_lifetime_buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Type0
(*
* A functoriality lemma
*)
unfold
let rrel_rel_always_compatible (#a:Type0) (rrel rel:srel a) =
forall (len:nat) (i:nat) (j:nat{i <= j /\ j <= len}). compatible_subseq_preorder len rrel i j rel
val region_lifetime_sub (#a:Type0) (#rrel #rel #subrel:srel a)
(b0:mbuffer a rrel rel)
(b1:mbuffer a rrel subrel)
: Lemma
(requires rrel_rel_always_compatible rrel subrel)
(ensures
(region_lifetime_buf b0 /\
(exists i len. U32.v i + U32.v len <= length b0 /\ b1 == mgsub subrel b0 i len)) ==> region_lifetime_buf b1)
val recallable_null (#a:Type0) (#rrel #rel:srel a)
:Lemma (recallable (mnull #a #rrel #rel)) [SMTPat (recallable (mnull #a #rrel #rel))]
(*
val recallable_includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (recallable larger <==> recallable smaller))
[SMTPatOr [
[SMTPat (recallable larger); SMTPat (recallable smaller);];
[SMTPat (larger `includes` smaller)];
]]
*)
val recallable_mgsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel /\ recallable b))
(ensures (recallable (mgsub sub_rel b i len)))
[SMTPatOr [
[SMTPat (recallable (mgsub sub_rel b i len))];
[SMTPat (recallable b); SMTPat (mgsub sub_rel b i len);]
]]
val recall (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack unit (requires (fun m -> recallable b \/ (region_lifetime_buf b /\ HS.live_region m (frameOf b))))
(ensures (fun m0 _ m1 -> m0 == m1 /\ live m1 b))
(*
* Begin: API for general witness and recall
* Clients can witness predicates on the contents of the buffer, and later recall them
* Provided the predicates are stable w.r.t. the buffer preorder
*) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | a: Type0 -> Type | Prims.Tot | [
"total"
] | [] | [
"FStar.Seq.Base.seq"
] | [] | false | false | false | true | true | let spred (a: Type0) =
| Seq.seq a -> Type0 | false |
|
LowStar.Monotonic.Buffer.fsti | LowStar.Monotonic.Buffer.modifies_trans_linear | val modifies_trans_linear (l l_goal: loc) (h1 h2 h3: HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)] | val modifies_trans_linear (l l_goal: loc) (h1 h2 h3: HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)] | let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
= modifies_trans l h1 h2 l_goal h3 | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 36,
"end_line": 1307,
"start_col": 0,
"start_line": 1303
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` return the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further the clients can attach a preorder with the subbuffer (sub_rel),
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers having the same region and address have
/// their elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then the corresponding sets of
/// memory locations are related by inclusion as well.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then the corresponding sets of memory locations
/// are related by inclusion as well.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then the corresponding sets of memory locations are related by inclusion as well
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
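/// An illustrative sketch, not part of the original interface: a buffer's
/// footprint is included in any union that mentions it, which is the usual
/// way callers weaken a footprint in a specification. The helper name and the
/// explicit calls below are for illustration only; the SMT patterns above
/// usually discharge this automatically.
let loc_includes_union_example (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel) (l:loc)
  : Lemma (loc_includes (loc_union (loc_buffer b) l) (loc_buffer b))
  = loc_includes_refl (loc_buffer b);
    loc_includes_union_l (loc_buffer b) l (loc_buffer b)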
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
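/// An illustrative sketch, not part of the original interface: splitting a
/// buffer at an arbitrary index ``mid`` yields two range footprints that are
/// disjoint, by ``loc_disjoint_loc_buffer_from_to``. The helper name is made
/// up for this example.
let loc_disjoint_split_example (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel) (mid:U32.t)
  : Lemma (loc_disjoint (loc_buffer_from_to b 0ul mid)
                        (loc_buffer_from_to b mid (len b)))
  = loc_disjoint_loc_buffer_from_to b 0ul mid mid (len b)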
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
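/// An illustrative sketch, not part of the original interface: addresses in
/// two distinct regions are always disjoint, regardless of the address sets.
/// The SMT pattern on ``loc_disjoint_addresses`` usually makes the explicit
/// call below unnecessary; the helper name is made up for this example.
let loc_disjoint_distinct_regions_example
  (r1 r2:HS.rid) (n1 n2:Set.set nat)
  : Lemma (requires (r1 <> r2))
          (ensures (loc_disjoint (loc_addresses false r1 n1) (loc_addresses false r2 n2)))
  = loc_disjoint_addresses false false r1 r2 n1 n2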
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
 * Same as all_disjoint, retained for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
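/// An illustrative sketch, not part of the original interface: the typical
/// framing argument. If only ``b1``'s footprint is modified and ``b2`` is
/// live and disjoint from it, then ``b2`` keeps both its liveness and its
/// contents. The helper name is made up; the symmetry call may be redundant
/// given the ``loc_disjoint_sym'`` pattern.
let modifies_frame_example (#a:Type0) (#rrel #rel:srel a)
  (b1 b2:mbuffer a rrel rel) (h h':HS.mem)
  : Lemma (requires (live h b2 /\
                     loc_disjoint (loc_buffer b1) (loc_buffer b2) /\
                     modifies (loc_buffer b1) h h'))
          (ensures (live h' b2 /\ as_seq h b2 == as_seq h' b2))
  = loc_disjoint_sym (loc_buffer b1) (loc_buffer b2);
    modifies_buffer_elim b2 (loc_buffer b1) h h'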
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
/// If the memory state does not change, then ``modifies`` holds for any
/// set of memory locations (and, in particular, for the empty set, ``loc_none``).
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
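/// An illustrative sketch, not part of the original interface: buffer
/// footprints are address-liveness-insensitive, so modifying only a buffer
/// cannot deallocate an unrelated reference. The helper name is made up for
/// this example.
let liveness_preserved_example
  (#a:Type0) (#rrel #rel:srel a) (#t:Type) (#pre:Preorder.preorder t)
  (b:mbuffer a rrel rel) (r:HS.mreference t pre) (h h':HS.mem)
  : Lemma (requires (modifies (loc_buffer b) h h' /\ h `HS.contains` r))
          (ensures (h' `HS.contains` r))
  = address_liveness_insensitive_buffer b;
    modifies_liveness_insensitive_mreference_weak (loc_buffer b) h h' r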
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x
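/// An illustrative sketch, not part of the original interface: buffer
/// footprints are also region-liveness-insensitive, so modifying only a
/// buffer keeps every region that was live beforehand live. The helper name
/// is made up for this example.
let region_liveness_preserved_example
  (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel) (r:HS.rid) (h h':HS.mem)
  : Lemma (requires (modifies (loc_buffer b) h h' /\ HS.live_region h r))
          (ensures (HS.live_region h' r))
  = region_liveness_insensitive_buffer b;
    modifies_liveness_insensitive_region_weak (loc_buffer b) h h' r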
/// Modifies clauses are transitive. This lemma is the most general
/// one.
val modifies_trans
(s12: loc)
(h1 h2: HS.mem)
(s23: loc)
(h3: HS.mem)
: Lemma
(requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
(ensures (modifies (loc_union s12 s23) h1 h3)) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
l: LowStar.Monotonic.Buffer.loc ->
l_goal: LowStar.Monotonic.Buffer.loc ->
h1: FStar.Monotonic.HyperStack.mem ->
h2: FStar.Monotonic.HyperStack.mem ->
h3: FStar.Monotonic.HyperStack.mem
-> FStar.Pervasives.Lemma
(requires
LowStar.Monotonic.Buffer.modifies l h1 h2 /\ LowStar.Monotonic.Buffer.modifies l_goal h2 h3 /\
LowStar.Monotonic.Buffer.loc_includes l_goal l)
(ensures LowStar.Monotonic.Buffer.modifies l_goal h1 h3)
[
SMTPat (LowStar.Monotonic.Buffer.modifies l h1 h2);
SMTPat (LowStar.Monotonic.Buffer.modifies l_goal h1 h3)
] | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowStar.Monotonic.Buffer.loc",
"FStar.Monotonic.HyperStack.mem",
"LowStar.Monotonic.Buffer.modifies_trans",
"Prims.unit",
"Prims.l_and",
"LowStar.Monotonic.Buffer.modifies",
"LowStar.Monotonic.Buffer.loc_includes",
"Prims.squash",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat",
"Prims.Nil"
] | [] | true | false | true | false | false | let modifies_trans_linear (l l_goal: loc) (h1 h2 h3: HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)] =
| modifies_trans l h1 h2 l_goal h3 | false |
LowStar.Monotonic.Buffer.fsti | LowStar.Monotonic.Buffer.gcmalloc_of_list_pre | val gcmalloc_of_list_pre : r: FStar.Monotonic.HyperHeap.rid -> init: Prims.list a -> Prims.logical | let gcmalloc_of_list_pre (#a:Type0) (r:HS.rid) (init:list a) =
HST.is_eternal_region r /\
normalize (FStar.List.Tot.length init <= UInt.max_int 32) | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 59,
"end_line": 2253,
"start_col": 7,
"start_line": 2251
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` return the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further the clients can attach a preorder with the subbuffer (sub_rel),
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers having the same region and address have
/// their elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then the corresponding sets of
/// memory locations are related by inclusion as well.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then the set of memory locations corresponding
/// to ``s1`` includes the one corresponding to ``s2``.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then the set of memory locations corresponding to ``s1`` includes
/// the one corresponding to ``s2``.
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
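(* Illustrative sketch, kept as a comment and not part of this interface:
   disjointness transports along inclusion, e.g. a location disjoint from a
   whole buffer is disjoint from any of its sub-buffers (hypothetical name).

   let disjoint_from_sub (l:loc) (#a:Type0) (#rrel #rel:srel a)
     (b:mbuffer a rrel rel) (i len:U32.t) (sub_rel:srel a)
     : Lemma (requires (U32.v i + U32.v len <= length b /\
                        loc_disjoint l (loc_buffer b)))
             (ensures  (loc_disjoint l (loc_buffer (mgsub sub_rel b i len))))
     = loc_includes_gsub_buffer_r' b i len sub_rel;
       loc_disjoint_includes_r l (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))
*)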
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
* Same as all_disjoint, retaining for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
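(* For instance, loc_pairwise_disjoint [loc_buffer b1; loc_buffer b2; loc_buffer b3],
   for some hypothetical buffers b1, b2 and b3, reduces at typechecking time
   (up to trivial conjuncts) to
   loc_disjoint (loc_buffer b1) (loc_buffer b2) /\
   loc_disjoint (loc_buffer b1) (loc_buffer b3) /\
   loc_disjoint (loc_buffer b2) (loc_buffer b3) *)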
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if,
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
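(* Illustrative sketch, kept as a comment and not part of this interface:
   the typical framing step enabled by the elimination lemma above
   (hypothetical name).

   let frame_across_disjoint_write (#a:Type0) (#rrel #rel:srel a)
     (b1 b2:mbuffer a rrel rel) (h0 h1:HS.mem)
     : Lemma (requires (live h0 b2 /\
                        loc_disjoint (loc_buffer b1) (loc_buffer b2) /\
                        modifies (loc_buffer b1) h0 h1))
             (ensures  (live h1 b2 /\ as_seq h0 b2 == as_seq h1 b2))
     = loc_disjoint_sym (loc_buffer b1) (loc_buffer b2);
       modifies_buffer_elim b2 (loc_buffer b1) h0 h1
*)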
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
/// If the memory state does not change, then ``modifies s h h`` holds for
/// any set ``s`` of memory locations (and, in particular, for the empty
/// set ``loc_none``.)
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
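(* Illustrative sketch, kept as a comment and not part of this interface:
   a modifies clause can always be weakened, e.g. from loc_none to any l
   (hypothetical name).

   let weaken_modifies_none (l:loc) (h0 h1:HS.mem)
     : Lemma (requires (modifies loc_none h0 h1))
             (ensures  (modifies l h0 h1))
     = loc_includes_none l;
       modifies_loc_includes l h0 h1 loc_none
*)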
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x
/// Modifies clauses are transitive. This lemma is the most general
/// one.
val modifies_trans
(s12: loc)
(h1 h2: HS.mem)
(s23: loc)
(h3: HS.mem)
: Lemma
(requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
(ensures (modifies (loc_union s12 s23) h1 h3))
let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
= modifies_trans l h1 h2 l_goal h3
/// Regions that are not live can be removed from sets of memory
/// locations that are modified.
val modifies_only_live_regions
(rs: Set.set HS.rid)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_regions false rs) l) h h' /\
(forall r . Set.mem r rs ==> (~ (HS.live_region h r)))
))
(ensures (modifies l h h'))
/// As a consequence, fresh regions can be removed from modifies
/// clauses.
val no_upd_fresh_region: r:HS.rid -> l:loc -> h0:HS.mem -> h1:HS.mem -> Lemma
(requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1))
(ensures (modifies l h0 h1))
[SMTPat (HS.fresh_region r h0 h1); SMTPat (modifies l h0 h1)]
val new_region_modifies (m0: HS.mem) (r0: HS.rid) (col: option int) : Lemma
(requires (HST.is_eternal_region r0 /\ HS.live_region m0 r0 /\ (None? col \/ HS.is_heap_color (Some?.v col))))
(ensures (
let (_, m1) = HS.new_eternal_region m0 r0 col in
modifies loc_none m0 m1
))
[SMTPat (HS.new_eternal_region m0 r0 col)]
/// Stack discipline: any stack frame (and all its transitively
/// extending regions) that is pushed, modified and popped can be
/// removed from a modifies clause.
/// AR: 01/29/2019: Removing the smt pattern from this lemma.
/// Clients are no longer expected to call it explicitly,
/// if you are having to, please raise an issue.
val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
))
/// Compatibility lemmas to rescue modifies clauses specified in the
/// standard F* HyperStack library.
val modifies_loc_regions_intro
(rs: Set.set HS.rid)
(h1 h2: HS.mem)
: Lemma
(requires (HS.modifies rs h1 h2))
(ensures (modifies (loc_regions true rs) h1 h2))
val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2))
/// Modifies clauses for allocating a reference: nothing is
/// modified. (In particular, a modifies clause does not track
/// memory locations that are created.)
val modifies_ralloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(i: HS.rid)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel)
(h' : HS.mem)
: Lemma
(requires (HST.ralloc_post i init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.ralloc_post i init h x h')]
val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')]
/// Modifies clause for freeing a reference: the address is modified.
val modifies_free
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel { HS.is_mm r } )
(m: HS.mem { m `HS.contains` r } )
: Lemma
(modifies (loc_freed_mreference r) m (HS.free r m))
[SMTPat (HS.free r m)]
/// Another compatibility lemma
val modifies_none_modifies
(h1 h2: HS.mem)
: Lemma
(requires (HST.modifies_none h1 h2))
(ensures (modifies loc_none h1 h2))
[SMTPat (HST.modifies_none h1 h2)]
/// Compatibility with HS.upd
val modifies_upd
(#t: Type) (#pre: Preorder.preorder t)
(r: HS.mreference t pre)
(v: t)
(h: HS.mem)
: Lemma
(requires (HS.contains h r))
(ensures (modifies (loc_mreference r) h (HS.upd h r v)))
[SMTPat (HS.upd h r v)]
/// Introduction lemma for modifying loc_buffer_from_to
val modifies_loc_buffer_from_to_intro
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
/// A memory ``h`` does not contain address ``a`` in region ``r``, denoted
/// ``does_not_contain_addr h (r, a)``, only if either region ``r`` is
/// not live, or address ``a`` is unused in region ``r``.
(* BEGIN TODO: move to FStar.Monotonic.HyperStack *)
val does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: GTot Type0
val not_live_region_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (~ (HS.live_region h (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val unused_in_does_not_contain_addr
(h: HS.mem)
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r)))
val addr_unused_in_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val free_does_not_contain_addr
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
HS.is_mm r /\
m `HS.contains` r /\
fst x == HS.frameOf r /\
snd x == HS.as_addr r
))
(ensures (
HS.free r m `does_not_contain_addr` x
))
[SMTPat (HS.free r m `does_not_contain_addr` x)]
val does_not_contain_addr_elim
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
m `does_not_contain_addr` x /\
HS.frameOf r == fst x /\
HS.as_addr r == snd x
))
(ensures (~ (m `HS.contains` r)))
(** END TODO *)
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_live_addresses
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_addresses false r a) l) h h' /\
(forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x))
))
(ensures (modifies l h h'))
(* Generic way to ensure that a buffer just allocated is disjoint from
any other object, however the latter's liveness is defined. *)
val loc_not_unused_in (h: HS.mem) : GTot loc
val loc_unused_in (h: HS.mem) : GTot loc
(* Shortcut notations with more handy names *)
let loc_in (l: loc) (h: HS.mem) =
loc_not_unused_in h `loc_includes` l
let loc_not_in (l: loc) (h: HS.mem) =
loc_unused_in h `loc_includes` l
val loc_regions_unused_in (h: HS.mem) (rs: Set.set HS.rid) : Lemma
(requires (forall r . Set.mem r rs ==> (~ (HS.live_region h r))))
(ensures (loc_unused_in h `loc_includes` loc_regions false rs))
val loc_unused_in_not_unused_in_disjoint (h: HS.mem) : Lemma
(loc_disjoint (loc_unused_in h) (loc_not_unused_in h))
val not_live_region_loc_not_unused_in_disjoint
(h0: HS.mem)
(r: HS.rid)
: Lemma
(requires (~ (HS.live_region h0 r)))
(ensures (loc_disjoint (loc_region_only false r) (loc_not_unused_in h0)))
let fresh_frame_loc_not_unused_in_disjoint
(h0 h1: HS.mem)
: Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)]
= not_live_region_loc_not_unused_in_disjoint h0 (HS.get_tip h1)
val live_loc_not_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b))
(ensures (loc_not_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (live h b)]
val unused_in_loc_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (unused_in b h))
(ensures (loc_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (unused_in b h)]
val modifies_address_liveness_insensitive_unused_in
(h h' : HS.mem)
: Lemma
(requires (modifies (address_liveness_insensitive_locs) h h'))
(ensures (loc_not_unused_in h' `loc_includes` loc_not_unused_in h /\ loc_unused_in h `loc_includes` loc_unused_in h'))
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_not_unused_in
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (modifies (loc_union (loc_unused_in h) l) h h'))
(ensures (modifies l h h'))
val mreference_live_loc_not_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (h `HS.contains` r))
(ensures (loc_not_unused_in h `loc_includes` loc_freed_mreference r /\ loc_not_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.contains h r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_freed_mreference r)];
]]
val mreference_unused_in_loc_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (loc_unused_in h `loc_includes` loc_freed_mreference r /\ loc_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.unused_in r h)];
[SMTPat (loc_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_unused_in h `loc_includes` loc_freed_mreference r)];
]]
let unused_in_not_unused_in_disjoint_2
(l1 l2 l1' l2': loc)
(h: HS.mem)
: Lemma
(requires (loc_unused_in h `loc_includes` l1 /\ loc_not_unused_in h `loc_includes` l2 /\ l1 `loc_includes` l1' /\ l2 `loc_includes` l2' ))
(ensures (loc_disjoint l1' l2' ))
[SMTPat (loc_disjoint l1' l2'); SMTPat (loc_unused_in h `loc_includes` l1); SMTPat (loc_not_unused_in h `loc_includes` l2)]
= loc_includes_trans (loc_unused_in h) l1 l1' ;
loc_includes_trans (loc_not_unused_in h) l2 l2' ;
loc_unused_in_not_unused_in_disjoint h ;
loc_disjoint_includes (loc_unused_in h) (loc_not_unused_in h) l1' l2'
val modifies_loc_unused_in
(l: loc)
(h1 h2: HS.mem)
(l' : loc)
: Lemma
(requires (
modifies l h1 h2 /\
address_liveness_insensitive_locs `loc_includes` l /\
loc_unused_in h2 `loc_includes` l'
))
(ensures (loc_unused_in h1 `loc_includes` l'))
[SMTPatOr [
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h2 `loc_includes` l')];
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h1 `loc_includes` l')];
]]
/// Shorthand: freshness
let fresh_loc (l: loc) (h h' : HS.mem) : GTot Type0 =
loc_unused_in h `loc_includes` l /\
loc_not_unused_in h' `loc_includes` l
let ralloc_post_fresh_loc (#a:Type) (#rel:Preorder.preorder a) (i: HS.rid) (init:a) (m0: HS.mem)
(x: HST.mreference a rel{HST.is_eternal_region (HS.frameOf x)}) (m1: HS.mem) : Lemma
(requires (HST.ralloc_post i init m0 x m1))
(ensures (fresh_loc (loc_freed_mreference x) m0 m1))
[SMTPat (HST.ralloc_post i init m0 x m1)]
= ()
//AR: this is needed for liveness across fresh_frame
val fresh_frame_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (modifies loc_none h0 h1))
[SMTPat (HS.fresh_frame h0 h1)]
val popped_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.popped h0 h1))
(ensures (modifies (loc_region_only false (HS.get_tip h0)) h0 h1))
[SMTPat (HS.popped h0 h1)]
val modifies_remove_new_locs (l_fresh l_aux l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (fresh_loc l_fresh h1 h2 /\
modifies l_aux h1 h2 /\
l_goal `loc_includes` l_aux /\
modifies (loc_union l_fresh l_goal) h2 h3))
(ensures (modifies l_goal h1 h3))
[SMTPat (fresh_loc l_fresh h1 h2);
SMTPat (modifies l_aux h1 h2);
SMTPat (modifies l_goal h1 h3)]
(*
 * AR: this lemma frames the modifies clause across a fresh frame.
 * One way to do it would have been to reuse the lemma modifies_remove_new_locs,
 * treating the fresh frame as another new location.
 * However, the way the library is set up, loc_region in any form cannot be
 * considered a fresh loc, so we have a special lemma for fresh_frame.
*)
let modifies_remove_fresh_frame (h1 h2 h3:HS.mem) (l:loc)
: Lemma (requires (HS.fresh_frame h1 h2 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h2)) l) h2 h3))
(ensures (modifies l h1 h3))
[SMTPat (modifies l h1 h3); SMTPat (HS.fresh_frame h1 h2)]
= loc_regions_unused_in h1 (HS.mod_set (Set.singleton (HS.get_tip h2)));
modifies_only_not_unused_in l h1 h3
/// Legacy shorthands for disjointness and inclusion of buffers
///
let disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_disjoint (loc_buffer b1) (loc_buffer b2)
let includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_includes (loc_buffer b1) (loc_buffer b2) /\
(g_is_null b1 <==> g_is_null b2)
val disjoint_neq (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (disjoint b1 b2 /\ U32.v (len b1) > 0))
(ensures (~(b1 === b2)))
val empty_disjoint (#t1 #t2: Type) (#rrel1 #rel1: srel t1) (#rrel2 #rel2: srel t2) (b1: mbuffer t1 rrel1 rel1) (b2: mbuffer t2 rrel2 rel2) : Lemma
(requires (length b1 == 0))
(ensures (disjoint b1 b2))
(*
/// The liveness of a sub-buffer follows from the liveness
/// of its enclosing buffer.
val includes_live (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (live h larger ==> live h smaller))
[SMTPatOr [
[SMTPat (includes larger smaller); SMTPat (live h larger)];
[SMTPat (includes larger smaller); SMTPat (live h smaller)];
]]
*)
val includes_frameOf_as_addr (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (g_is_null larger == g_is_null smaller /\ frameOf larger == frameOf smaller /\ as_addr larger == as_addr smaller))
[SMTPat (larger `includes` smaller)]
///
/// Useful shorthands for pointers, or maybe-null pointers
inline_for_extraction
type mpointer (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{length b == 1}
inline_for_extraction
type mpointer_or_null (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{if g_is_null b then True else length b == 1}
unfold
let deref (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (x:mpointer a rrel rel) =
get h x 0
/// Two pointers having different contents are disjoint. This is valid
/// only for pointers, i.e. buffers of size 1.
val pointer_distinct_sel_disjoint
(#a:Type0) (#rrel1 #rrel2 #rel1 #rel2:srel a)
(b1:mpointer a rrel1 rel1)
(b2:mpointer a rrel2 rel2)
(h:HS.mem)
:Lemma (requires (live h b1 /\ live h b2 /\ get h b1 0 =!= get h b2 0))
(ensures (disjoint b1 b2))
/// The following stateful operations on buffers do not change the
/// memory, but, as required by the C standard, they all require the
/// buffer in question to be live.
/// The nullity test, ``is_null b``, which KaRaMeL compiles to C as ``b == NULL``.
val is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack bool (requires (fun h -> live h b))
(ensures (fun h y h' -> h == h' /\ y == g_is_null b))
/// ``sub b i len`` constructs the sub-buffer of ``b`` starting from
/// offset ``i`` with length ``len``. KaRaMeL extracts this operation as
/// ``b + i`` (or, equivalently, ``&b[i]``.)
val msub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t) (len:Ghost.erased U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i + U32.v (Ghost.reveal len) <= length b /\ compatible_sub b i (Ghost.reveal len) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (Ghost.reveal len)))
/// ``offset b i`` constructs the tail of the buffer ``b`` starting from
/// offset ``i``, i.e. the sub-buffer of ``b`` starting from offset ``i``
/// with length ``U32.sub (len b) i``. KaRaMeL compiles it as ``b + i`` or
/// ``&b[i]``.
///
/// This stateful operation cannot be derived from ``sub``, because the
/// length cannot be computed outside of proofs.
val moffset (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i <= length b /\ compatible_sub b i (U32.sub (len b) i) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (U32.sub (len b) i)))
// goffset
/// ``index b i`` reads the value of ``b`` at offset ``i`` from memory and
/// returns it. KaRaMeL compiles it as b[i].
val index (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (i:U32.t)
:HST.Stack a (requires (fun h -> live h b /\ U32.v i < length b))
(ensures (fun h y h' -> h == h' /\ y == Seq.index (as_seq h b) (U32.v i)))
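(* Illustrative sketch, kept as a comment and not part of this interface:
   a read through ``index`` only needs liveness and a bounds check
   (hypothetical name).

   let read_first (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
     : HST.Stack a (requires (fun h -> live h b /\ 0 < length b))
                   (ensures  (fun h y h' -> h == h' /\ y == Seq.index (as_seq h b) 0))
     = index b 0ul
*)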
/// The following stateful operations on buffers modify the memory,
/// and, as usual, require the liveness of the buffer.
/// ``g_upd_seq b s h`` updates the entire buffer `b`'s contents in
/// heap `h` to correspond to the sequence `s`
val g_upd_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (s:Seq.lseq a (length b))
(h:HS.mem{live h b})
:GTot HS.mem
val lemma_g_upd_with_same_seq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b)) (ensures (g_upd_seq b (as_seq h b) h == h))
/// A lemma specifying `g_upd_seq` in terms of its effect on the
/// buffer's underlying sequence
val g_upd_seq_as_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(s:Seq.lseq a (length b))
(h:HS.mem{live h b})
: Lemma (let h' = g_upd_seq b s h in
(Seq.length s > 0 ==> not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
HST.equal_domains h h' /\
as_seq h' b == s)
/// ``g_upd b i v h`` updates the buffer `b` in heap `h` at location
/// `i` writing ``v`` there. This is the spec analog of the stateful
/// update `upd` below.
let g_upd (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: GTot HS.mem
= g_upd_seq b (Seq.upd (as_seq h b) i v) h
val g_upd_modifies_strong (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: Lemma (modifies (loc_buffer_from_to b (U32.uint_to_t i) (U32.uint_to_t (i + 1))) h (g_upd b i v h))
/// ``upd b i v`` writes ``v`` to the memory, at offset ``i`` of
/// buffer ``b``. KaRaMeL compiles it as ``b[i] = v``.
val upd'
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
:HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> h' == g_upd b (U32.v i) v h))
inline_for_extraction
let upd
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
: HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> (not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
as_seq h' b == Seq.upd (as_seq h b) (U32.v i) v))
= let h = HST.get () in
upd' b i v;
g_upd_seq_as_seq b (Seq.upd (as_seq h b) (U32.v i) v) h
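(* Illustrative sketch, kept as a comment and not part of this interface:
   a write through ``upd`` additionally requires the new contents to respect
   the buffer's preorder ``rel`` (hypothetical name).

   let write_first (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (v:a)
     : HST.Stack unit
         (requires (fun h -> live h b /\ 0 < length b /\
                           rel (as_seq h b) (Seq.upd (as_seq h b) 0 v)))
         (ensures  (fun h _ h' -> modifies (loc_buffer b) h h' /\ live h' b /\
                                as_seq h' b == Seq.upd (as_seq h b) 0 v))
     = upd b 0ul v
*)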
(* FIXME: Comment on `recall` *)
val recallable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val region_lifetime_buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Type0
(*
* A functoriality lemma
*)
unfold
let rrel_rel_always_compatible (#a:Type0) (rrel rel:srel a) =
forall (len:nat) (i:nat) (j:nat{i <= j /\ j <= len}). compatible_subseq_preorder len rrel i j rel
val region_lifetime_sub (#a:Type0) (#rrel #rel #subrel:srel a)
(b0:mbuffer a rrel rel)
(b1:mbuffer a rrel subrel)
: Lemma
(requires rrel_rel_always_compatible rrel subrel)
(ensures
(region_lifetime_buf b0 /\
(exists i len. U32.v i + U32.v len <= length b0 /\ b1 == mgsub subrel b0 i len)) ==> region_lifetime_buf b1)
val recallable_null (#a:Type0) (#rrel #rel:srel a)
:Lemma (recallable (mnull #a #rrel #rel)) [SMTPat (recallable (mnull #a #rrel #rel))]
(*
val recallable_includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (recallable larger <==> recallable smaller))
[SMTPatOr [
[SMTPat (recallable larger); SMTPat (recallable smaller);];
[SMTPat (larger `includes` smaller)];
]]
*)
val recallable_mgsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel /\ recallable b))
(ensures (recallable (mgsub sub_rel b i len)))
[SMTPatOr [
[SMTPat (recallable (mgsub sub_rel b i len))];
[SMTPat (recallable b); SMTPat (mgsub sub_rel b i len);]
]]
val recall (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack unit (requires (fun m -> recallable b \/ (region_lifetime_buf b /\ HS.live_region m (frameOf b))))
(ensures (fun m0 _ m1 -> m0 == m1 /\ live m1 b))
(*
* Begin: API for general witness and recall
* Clients can witness predicates on the contents of the buffer, and later recall them
* Provided the predicates are stable w.r.t. the buffer preorder
*)
(* Shorthand for predicates of Seq.seq a *)
unfold let spred (a:Type0) = Seq.seq a -> Type0
(*
 * Note the tight patterns on the quantifier; you may need to write additional
 * triggers if you are working with them directly
*)
unfold let stable_on (#a:Type0) (p:spred a) (rel:srel a) =
forall (s1 s2:Seq.seq a).{:pattern (p s1); (rel s1 s2); (p s2)} (p s1 /\ rel s1 s2) ==> p s2
(* Clients get this pure token when they witness a predicate *)
val witnessed (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a) :Type0
(*
* We can only support witness and recall for gc-malloced buffers (i.e. recallable ones)
* This is not a fundamental limitation, but needs some tweaks to the underlying state model
*)
val witness_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.ST unit (requires (fun h0 -> p (as_seq h0 b) /\ p `stable_on` rel))
(ensures (fun h0 _ h1 -> h0 == h1 /\ b `witnessed` p))
val recall_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.ST unit (requires (fun h0 -> (recallable b \/ live h0 b) /\ b `witnessed` p))
(ensures (fun h0 _ h1 -> h0 == h1 /\ live h0 b /\ p (as_seq h0 b)))
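(* Illustrative sketch, kept as a comment and not part of this interface:
   a stable predicate can be witnessed and later recalled; both steps are
   shown back to back on a recallable buffer (hypothetical name).

   let witness_then_recall (#a:Type0) (#rrel #rel:srel a)
     (b:mbuffer a rrel rel) (p:spred a)
     : HST.ST unit
         (requires (fun h -> recallable b /\ p (as_seq h b) /\ p `stable_on` rel))
         (ensures  (fun h0 _ h1 -> h0 == h1 /\ p (as_seq h1 b)))
     = witness_p b p;
       recall_p b p
*)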
val witnessed_functorial (#a:Type0)
(#rrel #rel1 #rel2:srel a)
(b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t)
(s1 s2:spred a)
: Lemma
(requires
rrel_rel_always_compatible rrel rel1 /\ //e.g. trivial_preorder, immutable preorder etc.
U32.v i + U32.v len <= length b1 /\
b2 == mgsub rel2 b1 i len /\ //the underlying allocation unit for b1 and b2 must be the same
witnessed b1 s1 /\
(forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2)))
(ensures witnessed b2 s2)
(*
* A stateful version that relaxes the rrel and rel compatibility
* but requires liveness of b1
*)
val witnessed_functorial_st (#a:Type0)
(#rrel #rel1 #rel2:srel a)
(b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t)
(s1 s2:spred a)
: HST.Stack unit
(requires fun h ->
live h b1 /\
U32.v i + U32.v len <= length b1 /\
b2 == mgsub rel2 b1 i len /\
witnessed b1 s1 /\
(forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2)))
(ensures fun h0 _ h1 -> h0 == h1 /\ witnessed b2 s2)
(* End: API for general witness and recall *)
/// Deallocation. A buffer that was allocated by ``malloc`` (see below)
/// can be ``free`` d.
val freeable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val free (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.ST unit (requires (fun h0 -> live h0 b /\ freeable b))
(ensures (fun h0 _ h1 -> (not (g_is_null b)) /\
Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\
(HS.get_tip h1) == (HS.get_tip h0) /\
modifies (loc_addr_of_buffer b) h0 h1 /\
HS.live_region h1 (frameOf b)))
val freeable_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (freeable b)) (ensures (length b > 0))
[SMTPat (freeable b)]
val freeable_disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (frameOf b1 <> frameOf b2 \/ as_addr b1 <> as_addr b2))
let freeable_disjoint' (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (loc_disjoint (loc_addr_of_buffer b1) (loc_addr_of_buffer b2)))
[SMTPat (freeable b1); SMTPat (disjoint b1 b2)]
= freeable_disjoint b1 b2
(***** Begin allocation functions *****)
/// Allocation. This is the common postcondition of all allocation
/// operators, which states that the resulting buffer is fresh, and
/// specifies its initial contents.
(*
* Allocation functions:
* In the return type, we try to give heap-independent postconditions (such as length)
* in the refinement of the buffer type (for the usage pattern of top-level buffers)
* while heap dependent postconditions are provided in the ensures clause
*
* One unsatisfying aspect is that these functions are duplicated in the wrappers that we write
* (e.g. Buffer, ImmutableBuffer, etc.)
* If we don't duplicate, then the clients may face type inference issues (for preorders)
*
* So, if you change any of the pre- or postcondition, you should change the pre and post spec functions
* (such as alloc_post_mem_common etc.), rather than the specs directly
* Perhaps we can rely on F* type inference and not write specs explicitly in those wrappers?
* Will try that
*
* For memory dependent post, alloc_post_mem_common is the one used by everyone
*
* For heap allocations, the library also provides partial functions that could return null
* Clients need to explicitly check for non-null values when using these functions
* Partial function specs use alloc_partial_post_mem_common
*
* NOTE: a useful test for the implementation of partial functions is that
* their spec should be valid even when their implementation just returns null
*)
unfold let lmbuffer (a:Type0) (rrel rel:srel a) (len:nat)
= b:mbuffer a rrel rel{length b == len /\ not (g_is_null b)}
unfold
let alloc_post_mem_common (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h0 h1:HS.mem) (s:Seq.seq a)
= live h1 b /\
unused_in b h0 /\
Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\
(HS.get_tip h1) == (HS.get_tip h0) /\
modifies loc_none h0 h1 /\
as_seq h1 b == s
(* Return type and post for partial allocation functions *)
unfold let lmbuffer_or_null (a:Type0) (rrel rel:srel a) (len:nat) (r:HS.rid)
= b:mbuffer a rrel rel{(not (g_is_null b)) ==> (length b == len /\ frameOf b == r)}
unfold let alloc_partial_post_mem_common (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h0 h1:HS.mem) (s:Seq.seq a)
= (g_is_null b /\ h0 == h1) \/
((not (g_is_null b)) /\ alloc_post_mem_common b h0 h1 s)
unfold let malloc_pre (r:HS.rid) (len:U32.t) = HST.is_eternal_region r /\ U32.v len > 0
/// ``gcmalloc r init len`` allocates a memory-managed buffer of some
/// positive length ``len`` in an eternal region ``r``. Every cell of this
/// buffer will have initial contents ``init``. Such a buffer cannot be
/// freed. In fact, it is eternal: it cannot be deallocated at all.
(*
* See the Allocation comment above when changing the spec
*)
val mgcmalloc (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ recallable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
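(* Illustrative sketch, kept as a comment and not part of this interface:
   allocating a garbage-collected buffer of 8 cells in the root region,
   which is an eternal region (hypothetical name).

   let alloc_in_root (#a:Type0) (#rrel:srel a) (init:a)
     : HST.ST (b:lmbuffer a rrel rrel 8{frameOf b == HS.root /\ recallable b})
         (requires (fun _ -> True))
         (ensures  (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create 8 init)))
     = mgcmalloc HS.root init 8ul
*)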
(*
* Allocate a memory-managed buffer initialized with contents from src
*
* This allocates and initializes the buffer atomically (from the perspective of the Low* clients)
*)
val mgcmalloc_and_blit (#a:Type0) (#rrel:srel a) (r:HS.rid)
(#rrel1 #rel1:srel a) (src:mbuffer a rrel1 rel1) (id_src:U32.t) (len:U32.t)
: HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ recallable b})
(requires fun h0 ->
malloc_pre r len /\
live h0 src /\ U32.v id_src + U32.v len <= length src)
(ensures fun h0 b h1 ->
alloc_post_mem_common b h0 h1
(Seq.slice (as_seq h0 src) (U32.v id_src) (U32.v id_src + U32.v len)))
(*
* See the Allocation comment above when changing the spec
*)
inline_for_extraction
let mgcmalloc_partial (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer_or_null a rrel rrel (U32.v len) r{recallable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
= mgcmalloc r init len
/// ``malloc r init len`` allocates a hand-managed buffer of some
/// positive length ``len`` in an eternal region ``r``. Every cell of this
/// buffer will have initial contents ``init``. Such a buffer can be
/// freed using ``free`` above. Note that the ``freeable`` permission is
/// only on the whole buffer ``b``, and is not inherited by any of its
/// strict sub-buffers.
(*
* See the Allocation comment above when changing the spec
*)
val mmalloc (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
(*
* Allocate a hand-managed buffer initialized with contents from src
*
* This allocates and initializes the buffer atomically (from the perspective of the Low* clients)
*)
val mmalloc_and_blit (#a:Type0) (#rrel:srel a) (r:HS.rid)
(#rrel1 #rel1:srel a) (src:mbuffer a rrel1 rel1) (id_src:U32.t) (len:U32.t)
: HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ freeable b})
(requires fun h0 ->
malloc_pre r len /\
live h0 src /\ U32.v id_src + U32.v len <= length src)
(ensures fun h0 b h1 ->
alloc_post_mem_common b h0 h1
(Seq.slice (as_seq h0 src) (U32.v id_src) (U32.v id_src + U32.v len)))
(*
* See the Allocation comment above when changing the spec
*)
inline_for_extraction
let mmalloc_partial (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer_or_null a rrel rrel (U32.v len) r{(not (g_is_null b)) ==> freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
= mmalloc r init len
/// ``alloca init len`` allocates a buffer of some positive length ``len``
/// in the current stack frame. Every cell of this buffer will have
/// initial contents ``init``. Such a buffer cannot be freed
/// individually, but is automatically freed as soon as its stack
/// frame is deallocated by ``HST.pop_frame``.
unfold let alloca_pre (len:U32.t) = U32.v len > 0
(*
* See the Allocation comment above when changing the spec
*)
val malloca (#a:Type0) (#rrel:srel a)
(init:a) (len:U32.t)
:HST.StackInline (lmbuffer a rrel rrel (U32.v len))
(requires (fun _ -> alloca_pre len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init) /\
frameOf b == HS.get_tip h0))
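(* Illustrative sketch, kept as a comment and not part of this interface:
   the usual stack-allocation pattern, where the buffer lives exactly as long
   as the frame pushed around it (hypothetical name; the final modifies
   loc_none typically follows from the frame lemmas further above).

   let on_stack (#a:Type0) (#rrel:srel a) (init:a)
     : HST.Stack a (requires (fun _ -> True))
                   (ensures  (fun h0 _ h1 -> modifies loc_none h0 h1))
     = HST.push_frame ();
       let b = malloca #a #rrel init 4ul in
       let x = index b 0ul in
       HST.pop_frame ();
       x
*)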
(*
* Allocate a stack buffer initialized with contents from src
*
* This allocates and initializes the buffer atomically (from the perspective of the Low* clients)
*)
val malloca_and_blit (#a:Type0) (#rrel:srel a)
(#rrel1 #rel1:srel a) (src:mbuffer a rrel1 rel1) (id_src:U32.t) (len:U32.t)
: HST.StackInline (lmbuffer a rrel rrel (U32.v len))
(requires fun h0 ->
alloca_pre len /\
live h0 src /\ U32.v id_src + U32.v len <= length src)
(ensures fun h0 b h1 ->
alloc_post_mem_common b h0 h1
(Seq.slice (as_seq h0 src) (U32.v id_src) (U32.v id_src + U32.v len)) /\
frameOf b == HS.get_tip h0)
/// ``alloca_of_list init`` allocates a buffer in the current stack
/// frame. The initial values of the cells of this buffer are
/// specified by the ``init`` list, which must be nonempty, and of
/// length representable as a machine integer.
unfold let alloca_of_list_pre (#a:Type0) (init:list a) =
normalize (0 < FStar.List.Tot.length init) /\
normalize (FStar.List.Tot.length init <= UInt.max_int 32)
(*
* See the Allocation comment above when changing the spec
*)
val malloca_of_list (#a:Type0) (#rrel:srel a) (init: list a)
:HST.StackInline (lmbuffer a rrel rrel (normalize_term (List.Tot.length init)))
(requires (fun _ -> alloca_of_list_pre init))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.seq_of_list init) /\
frameOf b == HS.get_tip h0)) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | r: FStar.Monotonic.HyperHeap.rid -> init: Prims.list a -> Prims.logical | Prims.Tot | [
"total"
] | [] | [
"FStar.Monotonic.HyperHeap.rid",
"Prims.list",
"Prims.l_and",
"FStar.HyperStack.ST.is_eternal_region",
"FStar.Pervasives.normalize",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"FStar.List.Tot.Base.length",
"FStar.UInt.max_int",
"Prims.logical"
] | [] | false | false | false | true | true | let gcmalloc_of_list_pre (#a: Type0) (r: HS.rid) (init: list a) =
| HST.is_eternal_region r /\ normalize (FStar.List.Tot.length init <= UInt.max_int 32) | false |
|
LowStar.Monotonic.Buffer.fsti | LowStar.Monotonic.Buffer.alloc_drgn_pre | val alloc_drgn_pre : h: FStar.Monotonic.HyperStack.mem -> d: FStar.HyperStack.ST.drgn -> len: FStar.UInt32.t
-> Prims.logical | let alloc_drgn_pre (h:HS.mem) (d:HST.drgn) (len:U32.t) = h `HS.live_region` (HST.rid_of_drgn d) /\ U32.v len > 0 | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 119,
"end_line": 2275,
"start_col": 7,
"start_line": 2275
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
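(* Editor's illustrative sketch (the names below are hypothetical and not part
   of this interface): a buffer whose contents may evolve arbitrarily can be
   described with the trivial preorder on sequences, mirroring
   LowStar.Buffer.trivial_preorder. *)
let example_trivial_preorder (a:Type0) : srel a = fun _ _ -> True
let example_buffer (a:Type0) = mbuffer a (example_trivial_preorder a) (example_trivial_preorder a)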
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` return the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further the clients can attach a preorder with the subbuffer (sub_rel),
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers having the same region and address have
/// their elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
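(* Editor's illustrative sketch (hypothetical name, expected to verify): the
   monoid laws above let unions of locations be simplified, either through the
   SMT patterns or by calling the lemmas explicitly, as below. *)
let example_loc_union_simplify (l:loc)
  : Lemma (loc_union (loc_union l loc_none) l == l)
= loc_union_loc_none_r l;
  loc_union_idem l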
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
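(* Editor's illustrative sketch (hypothetical name): a union of locations
   includes each of its operands; this is the typical way a callee's small
   footprint is related to a caller's larger one. *)
let example_loc_includes_union (l1 l2:loc)
  : Lemma (loc_includes (loc_union l1 l2) l2)
= loc_includes_refl l2;
  loc_includes_union_l l1 l2 l2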
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then the set of memory locations
/// of ``b1`` includes that of ``b2``.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then the set of memory locations corresponding to
/// ``s1`` includes the one corresponding to ``s2``.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``, then
/// the set of memory locations corresponding to ``s1`` includes the one
/// corresponding to ``s2``.
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
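(* Editor's illustrative sketch (hypothetical name): splitting a buffer at
   index `i` yields two sub-buffers with disjoint footprints. *)
let example_split_disjoint (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel) (i:U32.t)
  : Lemma (requires (U32.v i <= length b))
          (ensures  (loc_disjoint (loc_buffer (mgsub rel b 0ul i))
                                  (loc_buffer (mgsub rel b i (U32.sub (len b) i)))))
= loc_disjoint_gsub_buffer b 0ul i rel i (U32.sub (len b) i) rel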
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
* Same as all_disjoint, retaining for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
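(* Editor's illustrative sketch (hypothetical name, expected to reduce at
   typechecking time): `all_disjoint` unfolds to the conjunction of all
   pairwise disjointness facts, so each individual pair can be recovered. *)
let example_all_disjoint_pair (l1 l2 l3:loc)
  : Lemma (requires (all_disjoint [l1; l2; l3]))
          (ensures  (loc_disjoint l1 l2 /\ loc_disjoint l1 l3 /\ loc_disjoint l2 l3))
= ()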
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if,
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
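(* Editor's illustrative sketch (hypothetical name): the usual framing
   principle stated directly on buffers, a direct consequence of the
   elimination lemma above: a live buffer whose footprint is disjoint from the
   modified locations keeps both its liveness and its contents. *)
let example_frame_buffer (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel) (l:loc) (h h':HS.mem)
  : Lemma (requires (live h b /\ loc_disjoint (loc_buffer b) l /\ modifies l h h'))
          (ensures  (live h' b /\ as_seq h' b == as_seq h b))
= modifies_buffer_elim b l h h'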
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
/// If the memory state does not change, then ``modifies s h h`` holds for
/// any set ``s`` of memory locations (in particular, for the empty set,
/// ``loc_none``.)
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
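(* Editor's illustrative sketch (hypothetical name): a modifies clause can
   always be weakened to a larger footprint, e.g. to match a broader
   specification. *)
let example_modifies_weaken (l1 l2:loc) (h h':HS.mem)
  : Lemma (requires (modifies l1 h h'))
          (ensures  (modifies (loc_union l1 l2) h h'))
= loc_includes_refl l1;
  loc_includes_union_l l1 l2 l1;
  modifies_loc_includes (loc_union l1 l2) h h' l1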
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
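(* Editor's illustrative sketch (hypothetical name): buffer footprints are
   address-liveness-insensitive, so writing to one buffer can never invalidate
   the liveness of another live buffer, even without any disjointness
   hypothesis. *)
let example_write_preserves_liveness (#a:Type0) (#rrel #rel:srel a)
  (b b':mbuffer a rrel rel) (h h':HS.mem)
  : Lemma (requires (live h b' /\ modifies (loc_buffer b) h h'))
          (ensures  (live h' b'))
= address_liveness_insensitive_buffer b;
  modifies_liveness_insensitive_buffer_weak (loc_buffer b) h h' b'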
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x
/// Modifies clauses are transitive. This lemma is the most general
/// one.
val modifies_trans
(s12: loc)
(h1 h2: HS.mem)
(s23: loc)
(h3: HS.mem)
: Lemma
(requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
(ensures (modifies (loc_union s12 s23) h1 h3))
let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
= modifies_trans l h1 h2 l_goal h3
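(* Editor's illustrative sketch (hypothetical name): chaining a step that
   modifies nothing with a step that modifies `l`; the union with loc_none
   then collapses by the monoid laws. *)
let example_modifies_chain (l:loc) (h0 h1 h2:HS.mem)
  : Lemma (requires (modifies loc_none h0 h1 /\ modifies l h1 h2))
          (ensures  (modifies l h0 h2))
= modifies_trans loc_none h0 h1 l h2;
  loc_union_loc_none_l l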
/// Regions that are not live can be removed from sets of memory
/// locations that are modified.
val modifies_only_live_regions
(rs: Set.set HS.rid)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_regions false rs) l) h h' /\
(forall r . Set.mem r rs ==> (~ (HS.live_region h r)))
))
(ensures (modifies l h h'))
/// As a consequence, fresh regions can be removed from modifies
/// clauses.
val no_upd_fresh_region: r:HS.rid -> l:loc -> h0:HS.mem -> h1:HS.mem -> Lemma
(requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1))
(ensures (modifies l h0 h1))
[SMTPat (HS.fresh_region r h0 h1); SMTPat (modifies l h0 h1)]
val new_region_modifies (m0: HS.mem) (r0: HS.rid) (col: option int) : Lemma
(requires (HST.is_eternal_region r0 /\ HS.live_region m0 r0 /\ (None? col \/ HS.is_heap_color (Some?.v col))))
(ensures (
let (_, m1) = HS.new_eternal_region m0 r0 col in
modifies loc_none m0 m1
))
[SMTPat (HS.new_eternal_region m0 r0 col)]
/// Stack discipline: any stack frame (and all its transitively
/// extending regions) that is pushed, modified and popped can be
/// removed from a modifies clause.
/// AR: 01/29/2019: Removing the smt pattern from this lemma.
/// Clients are no longer expected to call it explicitly;
/// if you find yourself having to, please raise an issue.
val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
))
/// Compatibility lemmas to rescue modifies clauses specified in the
/// standard F* HyperStack library.
val modifies_loc_regions_intro
(rs: Set.set HS.rid)
(h1 h2: HS.mem)
: Lemma
(requires (HS.modifies rs h1 h2))
(ensures (modifies (loc_regions true rs) h1 h2))
val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2))
/// Modifies clauses for allocating a reference: nothing is
/// modified. (In particular, a modifies clause does not track
/// memory locations that are created.)
val modifies_ralloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(i: HS.rid)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel)
(h' : HS.mem)
: Lemma
(requires (HST.ralloc_post i init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.ralloc_post i init h x h')]
val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')]
/// Modifies clause for freeing a reference: the address is modified.
val modifies_free
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel { HS.is_mm r } )
(m: HS.mem { m `HS.contains` r } )
: Lemma
(modifies (loc_freed_mreference r) m (HS.free r m))
[SMTPat (HS.free r m)]
/// Another compatibility lemma
val modifies_none_modifies
(h1 h2: HS.mem)
: Lemma
(requires (HST.modifies_none h1 h2))
(ensures (modifies loc_none h1 h2))
[SMTPat (HST.modifies_none h1 h2)]
/// Compatibility with HS.upd
val modifies_upd
(#t: Type) (#pre: Preorder.preorder t)
(r: HS.mreference t pre)
(v: t)
(h: HS.mem)
: Lemma
(requires (HS.contains h r))
(ensures (modifies (loc_mreference r) h (HS.upd h r v)))
[SMTPat (HS.upd h r v)]
/// Introduction lemma for modifying loc_buffer_from_to
val modifies_loc_buffer_from_to_intro
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
/// A memory ``h`` does not contain address ``a`` in region ``r``, denoted
/// ``does_not_contain_addr h (r, a)``, only if, either region ``r`` is
/// not live, or address ``a`` is unused in region ``r``.
(* BEGIN TODO: move to FStar.Monotonic.HyperStack *)
val does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: GTot Type0
val not_live_region_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (~ (HS.live_region h (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val unused_in_does_not_contain_addr
(h: HS.mem)
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r)))
val addr_unused_in_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val free_does_not_contain_addr
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
HS.is_mm r /\
m `HS.contains` r /\
fst x == HS.frameOf r /\
snd x == HS.as_addr r
))
(ensures (
HS.free r m `does_not_contain_addr` x
))
[SMTPat (HS.free r m `does_not_contain_addr` x)]
val does_not_contain_addr_elim
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
m `does_not_contain_addr` x /\
HS.frameOf r == fst x /\
HS.as_addr r == snd x
))
(ensures (~ (m `HS.contains` r)))
(** END TODO *)
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_live_addresses
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_addresses false r a) l) h h' /\
(forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x))
))
(ensures (modifies l h h'))
(* Generic way to ensure that a buffer just allocated is disjoint from
any other object, however the latter's liveness is defined. *)
val loc_not_unused_in (h: HS.mem) : GTot loc
val loc_unused_in (h: HS.mem) : GTot loc
(* Shortcut notations with more handy names *)
let loc_in (l: loc) (h: HS.mem) =
loc_not_unused_in h `loc_includes` l
let loc_not_in (l: loc) (h: HS.mem) =
loc_unused_in h `loc_includes` l
val loc_regions_unused_in (h: HS.mem) (rs: Set.set HS.rid) : Lemma
(requires (forall r . Set.mem r rs ==> (~ (HS.live_region h r))))
(ensures (loc_unused_in h `loc_includes` loc_regions false rs))
val loc_unused_in_not_unused_in_disjoint (h: HS.mem) : Lemma
(loc_disjoint (loc_unused_in h) (loc_not_unused_in h))
val not_live_region_loc_not_unused_in_disjoint
(h0: HS.mem)
(r: HS.rid)
: Lemma
(requires (~ (HS.live_region h0 r)))
(ensures (loc_disjoint (loc_region_only false r) (loc_not_unused_in h0)))
let fresh_frame_loc_not_unused_in_disjoint
(h0 h1: HS.mem)
: Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)]
= not_live_region_loc_not_unused_in_disjoint h0 (HS.get_tip h1)
val live_loc_not_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b))
(ensures (loc_not_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (live h b)]
val unused_in_loc_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (unused_in b h))
(ensures (loc_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (unused_in b h)]
val modifies_address_liveness_insensitive_unused_in
(h h' : HS.mem)
: Lemma
(requires (modifies (address_liveness_insensitive_locs) h h'))
(ensures (loc_not_unused_in h' `loc_includes` loc_not_unused_in h /\ loc_unused_in h `loc_includes` loc_unused_in h'))
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_not_unused_in
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (modifies (loc_union (loc_unused_in h) l) h h'))
(ensures (modifies l h h'))
val mreference_live_loc_not_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (h `HS.contains` r))
(ensures (loc_not_unused_in h `loc_includes` loc_freed_mreference r /\ loc_not_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.contains h r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_freed_mreference r)];
]]
val mreference_unused_in_loc_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (loc_unused_in h `loc_includes` loc_freed_mreference r /\ loc_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.unused_in r h)];
[SMTPat (loc_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_unused_in h `loc_includes` loc_freed_mreference r)];
]]
let unused_in_not_unused_in_disjoint_2
(l1 l2 l1' l2': loc)
(h: HS.mem)
: Lemma
(requires (loc_unused_in h `loc_includes` l1 /\ loc_not_unused_in h `loc_includes` l2 /\ l1 `loc_includes` l1' /\ l2 `loc_includes` l2' ))
(ensures (loc_disjoint l1' l2' ))
[SMTPat (loc_disjoint l1' l2'); SMTPat (loc_unused_in h `loc_includes` l1); SMTPat (loc_not_unused_in h `loc_includes` l2)]
= loc_includes_trans (loc_unused_in h) l1 l1' ;
loc_includes_trans (loc_not_unused_in h) l2 l2' ;
loc_unused_in_not_unused_in_disjoint h ;
loc_disjoint_includes (loc_unused_in h) (loc_not_unused_in h) l1' l2'
val modifies_loc_unused_in
(l: loc)
(h1 h2: HS.mem)
(l' : loc)
: Lemma
(requires (
modifies l h1 h2 /\
address_liveness_insensitive_locs `loc_includes` l /\
loc_unused_in h2 `loc_includes` l'
))
(ensures (loc_unused_in h1 `loc_includes` l'))
[SMTPatOr [
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h2 `loc_includes` l')];
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h1 `loc_includes` l')];
]]
/// Shorthand: freshness
let fresh_loc (l: loc) (h h' : HS.mem) : GTot Type0 =
loc_unused_in h `loc_includes` l /\
loc_not_unused_in h' `loc_includes` l
let ralloc_post_fresh_loc (#a:Type) (#rel:Preorder.preorder a) (i: HS.rid) (init:a) (m0: HS.mem)
(x: HST.mreference a rel{HST.is_eternal_region (HS.frameOf x)}) (m1: HS.mem) : Lemma
(requires (HST.ralloc_post i init m0 x m1))
(ensures (fresh_loc (loc_freed_mreference x) m0 m1))
[SMTPat (HST.ralloc_post i init m0 x m1)]
= ()
//AR: this is needed for liveness across fresh_frame
val fresh_frame_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (modifies loc_none h0 h1))
[SMTPat (HS.fresh_frame h0 h1)]
val popped_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.popped h0 h1))
(ensures (modifies (loc_region_only false (HS.get_tip h0)) h0 h1))
[SMTPat (HS.popped h0 h1)]
val modifies_remove_new_locs (l_fresh l_aux l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (fresh_loc l_fresh h1 h2 /\
modifies l_aux h1 h2 /\
l_goal `loc_includes` l_aux /\
modifies (loc_union l_fresh l_goal) h2 h3))
(ensures (modifies l_goal h1 h3))
[SMTPat (fresh_loc l_fresh h1 h2);
SMTPat (modifies l_aux h1 h2);
SMTPat (modifies l_goal h1 h3)]
(*
 * AR: this lemma frames the modifies clause across a fresh frame.
 * One way to do it would have been to reuse the lemma modifies_remove_new_locs,
 * treating the fresh frame as another new location.
 * However, the way the library is set up, loc_region in any form cannot be
 * considered a fresh loc, so we provide a special lemma for fresh_frame.
*)
let modifies_remove_fresh_frame (h1 h2 h3:HS.mem) (l:loc)
: Lemma (requires (HS.fresh_frame h1 h2 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h2)) l) h2 h3))
(ensures (modifies l h1 h3))
[SMTPat (modifies l h1 h3); SMTPat (HS.fresh_frame h1 h2)]
= loc_regions_unused_in h1 (HS.mod_set (Set.singleton (HS.get_tip h2)));
modifies_only_not_unused_in l h1 h3
/// Legacy shorthands for disjointness and inclusion of buffers
///
let disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_disjoint (loc_buffer b1) (loc_buffer b2)
let includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_includes (loc_buffer b1) (loc_buffer b2) /\
(g_is_null b1 <==> g_is_null b2)
val disjoint_neq (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (disjoint b1 b2 /\ U32.v (len b1) > 0))
(ensures (~(b1 === b2)))
val empty_disjoint (#t1 #t2: Type) (#rrel1 #rel1: srel t1) (#rrel2 #rel2: srel t2) (b1: mbuffer t1 rrel1 rel1) (b2: mbuffer t2 rrel2 rel2) : Lemma
(requires (length b1 == 0))
(ensures (disjoint b1 b2))
(*
/// The liveness of a sub-buffer follows from the liveness
/// of its enclosing buffer.
val includes_live (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (live h larger ==> live h smaller))
[SMTPatOr [
[SMTPat (includes larger smaller); SMTPat (live h larger)];
[SMTPat (includes larger smaller); SMTPat (live h smaller)];
]]
*)
val includes_frameOf_as_addr (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (g_is_null larger == g_is_null smaller /\ frameOf larger == frameOf smaller /\ as_addr larger == as_addr smaller))
[SMTPat (larger `includes` smaller)]
///
/// Useful shorthands for pointers, or maybe-null pointers
inline_for_extraction
type mpointer (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{length b == 1}
inline_for_extraction
type mpointer_or_null (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{if g_is_null b then True else length b == 1}
unfold
let deref (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (x:mpointer a rrel rel) =
get h x 0
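(*
 * Illustrative sketch (not part of this interface): on a pointer, `deref`
 * is just `get ... 0`, i.e. the head of the underlying one-element
 * sequence.  The lemma name `deref_is_head` is hypothetical.
 *
 *   let deref_is_head (#a:Type0) (#rrel #rel:srel a)
 *     (h:HS.mem) (p:mpointer a rrel rel)
 *     : Lemma (deref h p == Seq.index (as_seq h p) 0)
 *     = ()
 *)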
/// Two pointers having different contents are disjoint. This is valid
/// only for pointers, i.e. buffers of size 1.
val pointer_distinct_sel_disjoint
(#a:Type0) (#rrel1 #rrel2 #rel1 #rel2:srel a)
(b1:mpointer a rrel1 rel1)
(b2:mpointer a rrel2 rel2)
(h:HS.mem)
:Lemma (requires (live h b1 /\ live h b2 /\ get h b1 0 =!= get h b2 0))
(ensures (disjoint b1 b2))
/// The following stateful operations on buffers do not change the
/// memory, but, as required by the C standard, they all require the
/// buffer in question to be live.
/// The nullity test, ``is_null b``, which KaRaMeL compiles to C as ``b == NULL``.
val is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack bool (requires (fun h -> live h b))
(ensures (fun h y h' -> h == h' /\ y == g_is_null b))
/// ``sub b i len`` constructs the sub-buffer of ``b`` starting from
/// offset ``i`` with length ``len``. KaRaMeL extracts this operation as
/// ``b + i`` (or, equivalently, ``&b[i]``.)
val msub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t) (len:Ghost.erased U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i + U32.v (Ghost.reveal len) <= length b /\ compatible_sub b i (Ghost.reveal len) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (Ghost.reveal len)))
/// ``offset b i`` constructs the tail of the buffer ``b`` starting from
/// offset ``i``, i.e. the sub-buffer of ``b`` starting from offset ``i``
/// with length ``U32.sub (len b) i``. KaRaMeL compiles it as ``b + i`` or
/// ``&b[i]``.
///
/// This stateful operation cannot be derived from ``sub``, because the
/// length cannot be computed outside of proofs.
val moffset (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i <= length b /\ compatible_sub b i (U32.sub (len b) i) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (U32.sub (len b) i)))
// goffset
/// ``index b i`` reads the value of ``b`` at offset ``i`` from memory and
/// returns it. KaRaMeL compiles it as b[i].
val index (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (i:U32.t)
:HST.Stack a (requires (fun h -> live h b /\ U32.v i < length b))
(ensures (fun h y h' -> h == h' /\ y == Seq.index (as_seq h b) (U32.v i)))
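(*
 * Illustrative sketch (not part of this interface): a stateful read of the
 * first element, specified against the ghost sequence.  `read_fst` and the
 * trivial preorder `triv` are hypothetical names, for illustration only.
 *
 *   let triv (a:Type0) : srel a = fun _ _ -> True
 *
 *   let read_fst (b:mbuffer U32.t (triv U32.t) (triv U32.t){length b > 0})
 *     : HST.Stack U32.t
 *       (requires fun h -> live h b)
 *       (ensures  fun h v h' -> h == h' /\ v == get h b 0)
 *     = index b 0ul
 *)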
/// The following stateful operations on buffers modify the memory,
/// and, as usual, require the liveness of the buffer.
/// ``g_upd_seq b s h`` updates the entire buffer `b`'s contents in
/// heap `h` to correspond to the sequence `s`
val g_upd_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (s:Seq.lseq a (length b))
(h:HS.mem{live h b})
:GTot HS.mem
val lemma_g_upd_with_same_seq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b)) (ensures (g_upd_seq b (as_seq h b) h == h))
/// A lemma specifying `g_upd_seq` in terms of its effect on the
/// buffer's underlying sequence
val g_upd_seq_as_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(s:Seq.lseq a (length b))
(h:HS.mem{live h b})
: Lemma (let h' = g_upd_seq b s h in
(Seq.length s > 0 ==> not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
HST.equal_domains h h' /\
as_seq h' b == s)
/// ``g_upd b i v h`` updates the buffer `b` in heap `h` at location
/// `i` writing ``v`` there. This is the spec analog of the stateful
/// update `upd` below.
let g_upd (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: GTot HS.mem
= g_upd_seq b (Seq.upd (as_seq h b) i v) h
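(*
 * Illustrative sketch (not part of this interface): `g_upd` composed with
 * `g_upd_seq_as_seq` gives the expected description of the updated heap.
 * The lemma name `g_upd_as_seq` is hypothetical.
 *
 *   let g_upd_as_seq (#a:Type0) (#rrel #rel:srel a)
 *     (b:mbuffer a rrel rel) (i:nat{i < length b}) (v:a) (h:HS.mem{live h b})
 *     : Lemma (as_seq (g_upd b i v h) b == Seq.upd (as_seq h b) i v)
 *     = g_upd_seq_as_seq b (Seq.upd (as_seq h b) i v) h
 *)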
val g_upd_modifies_strong (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: Lemma (modifies (loc_buffer_from_to b (U32.uint_to_t i) (U32.uint_to_t (i + 1))) h (g_upd b i v h))
/// ``upd b i v`` writes ``v`` to the memory, at offset ``i`` of
/// buffer ``b``. KaRaMeL compiles it as ``b[i] = v``.
val upd'
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
:HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> h' == g_upd b (U32.v i) v h))
inline_for_extraction
let upd
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
: HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> (not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
as_seq h' b == Seq.upd (as_seq h b) (U32.v i) v))
= let h = HST.get () in
upd' b i v;
g_upd_seq_as_seq b (Seq.upd (as_seq h b) (U32.v i) v) h
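(*
 * Illustrative sketch (not part of this interface): writing then describing
 * the result of the write.  With a trivial preorder, the `rel ...`
 * precondition of `upd` holds vacuously.  `set_fst` and `triv` are
 * hypothetical names.
 *
 *   let triv (a:Type0) : srel a = fun _ _ -> True
 *
 *   let set_fst (b:mbuffer U32.t (triv U32.t) (triv U32.t){length b > 0}) (v:U32.t)
 *     : HST.Stack unit
 *       (requires fun h -> live h b)
 *       (ensures  fun h _ h' ->
 *          live h' b /\
 *          modifies (loc_buffer b) h h' /\
 *          get h' b 0 == v)
 *     = upd b 0ul v
 *)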
(* FIXME: Comment on `recall` *)
val recallable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val region_lifetime_buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Type0
(*
* A functoriality lemma
*)
unfold
let rrel_rel_always_compatible (#a:Type0) (rrel rel:srel a) =
forall (len:nat) (i:nat) (j:nat{i <= j /\ j <= len}). compatible_subseq_preorder len rrel i j rel
val region_lifetime_sub (#a:Type0) (#rrel #rel #subrel:srel a)
(b0:mbuffer a rrel rel)
(b1:mbuffer a rrel subrel)
: Lemma
(requires rrel_rel_always_compatible rrel subrel)
(ensures
(region_lifetime_buf b0 /\
(exists i len. U32.v i + U32.v len <= length b0 /\ b1 == mgsub subrel b0 i len)) ==> region_lifetime_buf b1)
val recallable_null (#a:Type0) (#rrel #rel:srel a)
:Lemma (recallable (mnull #a #rrel #rel)) [SMTPat (recallable (mnull #a #rrel #rel))]
(*
val recallable_includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (recallable larger <==> recallable smaller))
[SMTPatOr [
[SMTPat (recallable larger); SMTPat (recallable smaller);];
[SMTPat (larger `includes` smaller)];
]]
*)
val recallable_mgsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel /\ recallable b))
(ensures (recallable (mgsub sub_rel b i len)))
[SMTPatOr [
[SMTPat (recallable (mgsub sub_rel b i len))];
[SMTPat (recallable b); SMTPat (mgsub sub_rel b i len);]
]]
val recall (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack unit (requires (fun m -> recallable b \/ (region_lifetime_buf b /\ HS.live_region m (frameOf b))))
(ensures (fun m0 _ m1 -> m0 == m1 /\ live m1 b))
(*
* Begin: API for general witness and recall
* Clients can witness predicates on the contents of the buffer, and later recall them
* Provided the predicates are stable w.r.t. the buffer preorder
*)
(* Shorthand for predicates of Seq.seq a *)
unfold let spred (a:Type0) = Seq.seq a -> Type0
(*
 * Note the tight patterns on the quantifiers; you may need to write additional triggers
 * if you are working with them directly
*)
unfold let stable_on (#a:Type0) (p:spred a) (rel:srel a) =
forall (s1 s2:Seq.seq a).{:pattern (p s1); (rel s1 s2); (p s2)} (p s1 /\ rel s1 s2) ==> p s2
(* Clients get this pure token when they witness a predicate *)
val witnessed (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a) :Type0
(*
 * We can only support witness and recall for gc-malloced buffers (i.e. recallable ones).
 * This is not a fundamental limitation, but lifting it needs some tweaks to the underlying state model.
*)
val witness_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.ST unit (requires (fun h0 -> p (as_seq h0 b) /\ p `stable_on` rel))
(ensures (fun h0 _ h1 -> h0 == h1 /\ b `witnessed` p))
val recall_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.ST unit (requires (fun h0 -> (recallable b \/ live h0 b) /\ b `witnessed` p))
(ensures (fun h0 _ h1 -> h0 == h1 /\ live h0 b /\ p (as_seq h0 b)))
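(*
 * Illustrative sketch (not part of this interface): witnessing a stable
 * property and recalling it later.  The names `set_once` and `fst_set` are
 * hypothetical; `set_once` never unsets the first cell, so the predicate
 * "the first cell is true" is stable w.r.t. it.
 *
 *   let set_once : srel bool = fun s1 s2 ->
 *     Seq.length s1 == Seq.length s2 /\
 *     ((Seq.length s1 > 0 /\ Seq.index s1 0 == true) ==> Seq.index s2 0 == true)
 *
 *   let fst_set : spred bool = fun s -> Seq.length s > 0 /\ Seq.index s 0 == true
 *
 * Given a recallable buffer `b : mbuffer bool set_once set_once` in a state
 * where `fst_set (as_seq h b)` holds, a client calls `witness_p b fst_set`
 * once, and may later call `recall_p b fst_set` to recover
 * `fst_set (as_seq h' b)` in any later state `h'`.
 *)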
val witnessed_functorial (#a:Type0)
(#rrel #rel1 #rel2:srel a)
(b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t)
(s1 s2:spred a)
: Lemma
(requires
rrel_rel_always_compatible rrel rel1 /\ //e.g. trivial_preorder, immutable preorder etc.
U32.v i + U32.v len <= length b1 /\
b2 == mgsub rel2 b1 i len /\ //the underlying allocation unit for b1 and b2 must be the same
witnessed b1 s1 /\
(forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2)))
(ensures witnessed b2 s2)
(*
* A stateful version that relaxes the rrel and rel compatibility
* but requires liveness of b1
*)
val witnessed_functorial_st (#a:Type0)
(#rrel #rel1 #rel2:srel a)
(b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t)
(s1 s2:spred a)
: HST.Stack unit
(requires fun h ->
live h b1 /\
U32.v i + U32.v len <= length b1 /\
b2 == mgsub rel2 b1 i len /\
witnessed b1 s1 /\
(forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2)))
(ensures fun h0 _ h1 -> h0 == h1 /\ witnessed b2 s2)
(* End: API for general witness and recall *)
/// Deallocation. A buffer that was allocated by ``malloc`` (see below)
/// can be deallocated with ``free``.
val freeable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val free (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.ST unit (requires (fun h0 -> live h0 b /\ freeable b))
(ensures (fun h0 _ h1 -> (not (g_is_null b)) /\
Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\
(HS.get_tip h1) == (HS.get_tip h0) /\
modifies (loc_addr_of_buffer b) h0 h1 /\
HS.live_region h1 (frameOf b)))
val freeable_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (freeable b)) (ensures (length b > 0))
[SMTPat (freeable b)]
val freeable_disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (frameOf b1 <> frameOf b2 \/ as_addr b1 <> as_addr b2))
let freeable_disjoint' (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (loc_disjoint (loc_addr_of_buffer b1) (loc_addr_of_buffer b2)))
[SMTPat (freeable b1); SMTPat (disjoint b1 b2)]
= freeable_disjoint b1 b2
(***** Begin allocation functions *****)
/// Allocation. This is the common postcondition of all allocation
/// operators: it states that the resulting buffer is fresh, and
/// specifies its initial contents.
(*
* Allocation functions:
* In the return type, we try to give heap-independent postconditions (such as length)
* in the refinement of the buffer type (for the usage pattern of top-level buffers)
* while heap dependent postconditions are provided in the ensures clause
*
* One unsatisfying aspect is that these functions are duplicated in the wrappers that we write
* (e.g. Buffer, ImmutableBuffer, etc.)
* If we don't duplicate, then the clients may face type inference issues (for preorders)
*
 * So, if you change any of the pre- or postconditions, you should change the pre and post spec functions
* (such as alloc_post_mem_common etc.), rather than the specs directly
* Perhaps we can rely on F* type inference and not write specs explicitly in those wrappers?
* Will try that
*
* For memory dependent post, alloc_post_mem_common is the one used by everyone
*
* For heap allocations, the library also provides partial functions that could return null
* Clients need to explicitly check for non-null values when using these functions
* Partial function specs use alloc_partial_post_mem_common
*
* NOTE: a useful test for the implementation of partial functions is that
* their spec should be valid even when their implementation just returns null
*)
unfold let lmbuffer (a:Type0) (rrel rel:srel a) (len:nat)
= b:mbuffer a rrel rel{length b == len /\ not (g_is_null b)}
unfold
let alloc_post_mem_common (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h0 h1:HS.mem) (s:Seq.seq a)
= live h1 b /\
unused_in b h0 /\
Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\
(HS.get_tip h1) == (HS.get_tip h0) /\
modifies loc_none h0 h1 /\
as_seq h1 b == s
(* Return type and post for partial allocation functions *)
unfold let lmbuffer_or_null (a:Type0) (rrel rel:srel a) (len:nat) (r:HS.rid)
= b:mbuffer a rrel rel{(not (g_is_null b)) ==> (length b == len /\ frameOf b == r)}
unfold let alloc_partial_post_mem_common (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h0 h1:HS.mem) (s:Seq.seq a)
= (g_is_null b /\ h0 == h1) \/
((not (g_is_null b)) /\ alloc_post_mem_common b h0 h1 s)
unfold let malloc_pre (r:HS.rid) (len:U32.t) = HST.is_eternal_region r /\ U32.v len > 0
/// ``gcmalloc r init len`` allocates a memory-managed buffer of some
/// positive length ``len`` in an eternal region ``r``. Every cell of this
/// buffer will have initial contents ``init``. Such a buffer cannot be
/// freed. In fact, it is eternal: it cannot be deallocated at all.
(*
* See the Allocation comment above when changing the spec
*)
val mgcmalloc (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ recallable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
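(*
 * Illustrative sketch (not part of this interface): a top-level, GC-managed
 * buffer of sixteen zeroes in the root region.  `zeros` and `triv` are
 * hypothetical names, for illustration only.
 *
 *   let triv (a:Type0) : srel a = fun _ _ -> True
 *
 *   let zeros () : HST.ST (b:lmbuffer U32.t (triv U32.t) (triv U32.t) 16{recallable b})
 *       (requires fun _ -> True)
 *       (ensures  fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create 16 0ul))
 *     = mgcmalloc HS.root 0ul 16ul
 *)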
(*
* Allocate a memory-managed buffer initialized with contents from src
*
* This allocates and initializes the buffer atomically (from the perspective of the Low* clients)
*)
val mgcmalloc_and_blit (#a:Type0) (#rrel:srel a) (r:HS.rid)
(#rrel1 #rel1:srel a) (src:mbuffer a rrel1 rel1) (id_src:U32.t) (len:U32.t)
: HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ recallable b})
(requires fun h0 ->
malloc_pre r len /\
live h0 src /\ U32.v id_src + U32.v len <= length src)
(ensures fun h0 b h1 ->
alloc_post_mem_common b h0 h1
(Seq.slice (as_seq h0 src) (U32.v id_src) (U32.v id_src + U32.v len)))
(*
* See the Allocation comment above when changing the spec
*)
inline_for_extraction
let mgcmalloc_partial (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer_or_null a rrel rrel (U32.v len) r{recallable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
= mgcmalloc r init len
/// ``malloc r init len`` allocates a hand-managed buffer of some
/// positive length ``len`` in an eternal region ``r``. Every cell of this
/// buffer will have initial contents ``init``. Such a buffer can be
/// freed using ``free`` above. Note that the ``freeable`` permission is
/// only on the whole buffer ``b``, and is not inherited by any of its
/// strict sub-buffers.
(*
* See the Allocation comment above when changing the spec
*)
val mmalloc (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
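(*
 * Illustrative sketch (not part of this interface): allocate on the heap,
 * use the buffer, then free it.  `with_scratch` and `triv` are hypothetical
 * names.
 *
 *   let triv (a:Type0) : srel a = fun _ _ -> True
 *
 *   let with_scratch () : HST.ST unit
 *       (requires fun _ -> True)
 *       (ensures  fun _ _ _ -> True)
 *     = let b = mmalloc #U32.t #(triv U32.t) HS.root 0ul 64ul in
 *       upd b 0ul 42ul;
 *       // ... use b ...
 *       free b
 *)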
(*
* Allocate a hand-managed buffer initialized with contents from src
*
* This allocates and initializes the buffer atomically (from the perspective of the Low* clients)
*)
val mmalloc_and_blit (#a:Type0) (#rrel:srel a) (r:HS.rid)
(#rrel1 #rel1:srel a) (src:mbuffer a rrel1 rel1) (id_src:U32.t) (len:U32.t)
: HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ freeable b})
(requires fun h0 ->
malloc_pre r len /\
live h0 src /\ U32.v id_src + U32.v len <= length src)
(ensures fun h0 b h1 ->
alloc_post_mem_common b h0 h1
(Seq.slice (as_seq h0 src) (U32.v id_src) (U32.v id_src + U32.v len)))
(*
* See the Allocation comment above when changing the spec
*)
inline_for_extraction
let mmalloc_partial (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer_or_null a rrel rrel (U32.v len) r{(not (g_is_null b)) ==> freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
= mmalloc r init len
/// ``alloca init len`` allocates a buffer of some positive length ``len``
/// in the current stack frame. Every cell of this buffer will have
/// initial contents ``init``. Such a buffer cannot be freed
/// individually, but is automatically freed as soon as its stack
/// frame is deallocated by ``HST.pop_frame``.
unfold let alloca_pre (len:U32.t) = U32.v len > 0
(*
* See the Allocation comment above when changing the spec
*)
val malloca (#a:Type0) (#rrel:srel a)
(init:a) (len:U32.t)
:HST.StackInline (lmbuffer a rrel rrel (U32.v len))
(requires (fun _ -> alloca_pre len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init) /\
frameOf b == HS.get_tip h0))
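(*
 * Illustrative sketch (not part of this interface): a temporary stack
 * buffer, reclaimed automatically when the enclosing frame is popped.
 * `on_stack` and `triv` are hypothetical names.
 *
 *   let triv (a:Type0) : srel a = fun _ _ -> True
 *
 *   let on_stack () : HST.Stack U32.t
 *       (requires fun _ -> True)
 *       (ensures  fun _ _ _ -> True)
 *     = HST.push_frame ();
 *       let b = malloca #U32.t #(triv U32.t) 1ul 8ul in
 *       upd b 0ul 42ul;
 *       let v = index b 0ul in
 *       HST.pop_frame ();
 *       v
 *)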
(*
* Allocate a stack buffer initialized with contents from src
*
* This allocates and initializes the buffer atomically (from the perspective of the Low* clients)
*)
val malloca_and_blit (#a:Type0) (#rrel:srel a)
(#rrel1 #rel1:srel a) (src:mbuffer a rrel1 rel1) (id_src:U32.t) (len:U32.t)
: HST.StackInline (lmbuffer a rrel rrel (U32.v len))
(requires fun h0 ->
alloca_pre len /\
live h0 src /\ U32.v id_src + U32.v len <= length src)
(ensures fun h0 b h1 ->
alloc_post_mem_common b h0 h1
(Seq.slice (as_seq h0 src) (U32.v id_src) (U32.v id_src + U32.v len)) /\
frameOf b == HS.get_tip h0)
/// ``alloca_of_list init`` allocates a buffer in the current stack
/// frame. The initial values of the cells of this buffer are
/// specified by the ``init`` list, which must be nonempty, and of
/// length representable as a machine integer.
unfold let alloca_of_list_pre (#a:Type0) (init:list a) =
normalize (0 < FStar.List.Tot.length init) /\
normalize (FStar.List.Tot.length init <= UInt.max_int 32)
(*
* See the Allocation comment above when changing the spec
*)
val malloca_of_list (#a:Type0) (#rrel:srel a) (init: list a)
:HST.StackInline (lmbuffer a rrel rrel (normalize_term (List.Tot.length init)))
(requires (fun _ -> alloca_of_list_pre init))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.seq_of_list init) /\
frameOf b == HS.get_tip h0))
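(*
 * Illustrative sketch (not part of this interface): a stack buffer whose
 * initial contents are given by a literal list.  `triv` is a hypothetical
 * trivial preorder; the call must occur after `HST.push_frame ()`.
 *
 *   let triv (a:Type0) : srel a = fun _ _ -> True
 *
 *   let b = malloca_of_list #U32.t #(triv U32.t) [1ul; 2ul; 3ul] in
 *   // length b == 3 and, in the resulting heap h1,
 *   // as_seq h1 b == Seq.seq_of_list [1ul; 2ul; 3ul]
 *)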
unfold let gcmalloc_of_list_pre (#a:Type0) (r:HS.rid) (init:list a) =
HST.is_eternal_region r /\
normalize (FStar.List.Tot.length init <= UInt.max_int 32)
(*
* See the Allocation comment above when changing the spec
*)
val mgcmalloc_of_list (#a:Type0) (#rrel:srel a) (r:HS.rid) (init:list a)
:HST.ST (b:lmbuffer a rrel rrel (normalize_term (List.Tot.length init)){frameOf b == r /\ recallable b})
(requires (fun _ -> gcmalloc_of_list_pre r init))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.seq_of_list init)))
(*
* See the Allocation comment above when changing the spec
*)
inline_for_extraction
let mgcmalloc_of_list_partial (#a:Type0) (#rrel:srel a) (r:HS.rid) (init:list a)
:HST.ST (b:lmbuffer_or_null a rrel rrel (normalize_term (List.Tot.length init)) r{recallable b})
(requires (fun _ -> gcmalloc_of_list_pre r init))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.seq_of_list init)))
= mgcmalloc_of_list r init | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | h: FStar.Monotonic.HyperStack.mem -> d: FStar.HyperStack.ST.drgn -> len: FStar.UInt32.t
-> Prims.logical | Prims.Tot | [
"total"
] | [] | [
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.drgn",
"FStar.UInt32.t",
"Prims.l_and",
"Prims.b2t",
"FStar.Monotonic.HyperStack.live_region",
"FStar.HyperStack.ST.rid_of_drgn",
"Prims.op_GreaterThan",
"FStar.UInt32.v",
"Prims.logical"
] | [] | false | false | false | true | true | let alloc_drgn_pre (h: HS.mem) (d: HST.drgn) (len: U32.t) =
| h `HS.live_region` (HST.rid_of_drgn d) /\ U32.v len > 0 | false |
|
LowStar.Monotonic.Buffer.fsti | LowStar.Monotonic.Buffer.fresh_frame_loc_not_unused_in_disjoint | val fresh_frame_loc_not_unused_in_disjoint (h0 h1: HS.mem)
: Lemma (requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)] | val fresh_frame_loc_not_unused_in_disjoint (h0 h1: HS.mem)
: Lemma (requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)] | let fresh_frame_loc_not_unused_in_disjoint
(h0 h1: HS.mem)
: Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)]
= not_live_region_loc_not_unused_in_disjoint h0 (HS.get_tip h1) | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 63,
"end_line": 1588,
"start_col": 0,
"start_line": 1582
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq h b``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` have the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
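(*
 * Illustrative sketch (not part of this interface): `get` is just sequence
 * indexing on `as_seq`, so proof-level facts about buffer contents reduce
 * to facts about sequences.  The lemma name `get_is_index` is hypothetical.
 *
 *   let get_is_index (#a:Type0) (#rrel #rel:srel a)
 *     (h:HS.mem) (b:mbuffer a rrel rel) (i:nat{i < length b})
 *     : Lemma (get h b i == Seq.index (as_seq h b) i)
 *     = ()
 *)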
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining the sub-buffer API, we need to define the notion of "compatibility".
///
/// Sub-buffers can be taken with a different preorder than their parent buffers,
/// but we need to ensure that changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible.
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
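(*
 * Illustrative sketch (not part of this interface): carving the middle two
 * cells out of a four-cell buffer, keeping the same preorder.  With the
 * hypothetical trivial preorder `triv`, `compatible_sub` holds trivially.
 *
 *   let triv (a:Type0) : srel a = fun _ _ -> True
 *
 *   let middle (#a:Type0) (b:mbuffer a (triv a) (triv a){length b == 4})
 *     : GTot (b':mbuffer a (triv a) (triv a){length b' == 2})
 *     = mgsub (triv a) b 1ul 2ul
 *)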
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer are the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers with the same region and address have
/// elements of the same type (and the same initial preorder).
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
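(*
 * Illustrative sketch (not part of this interface): the commutative-monoid
 * laws above, together with idempotence, let duplicate locations in a
 * footprint be collapsed.  The lemma name `loc_union_dedup` is hypothetical.
 *
 *   let loc_union_dedup (l1 l2: loc)
 *     : Lemma (loc_union l1 (loc_union l2 l1) == loc_union l1 l2)
 *     = loc_union_comm l2 l1;
 *       loc_union_assoc l1 l1 l2
 *)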
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then the same inclusion holds
/// for their corresponding sets of memory locations.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then the same inclusion holds for their
/// corresponding sets of memory locations.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then the same inclusion holds for their corresponding sets of memory locations.
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
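(* Illustrative client-side sketch (not part of this interface): with the
   three pattern lemmas above, a composite footprint automatically includes
   each of its components, e.g. for two buffers ``b1`` and ``b2``:
   let union_includes_each (#a:Type0) (#rrel #rel:srel a)
       (b1 b2:mbuffer a rrel rel)
     : Lemma (loc_includes (loc_union (loc_buffer b1) (loc_buffer b2)) (loc_buffer b1) /\
              loc_includes (loc_union (loc_buffer b1) (loc_buffer b2)) (loc_buffer b2))
     = ()  (* expected to be discharged by loc_includes_union_l_buffer and
              loc_includes_refl via their SMT patterns *)
*)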
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
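(* Illustrative sketch: combining loc_disjoint_union_r' and
   loc_disjoint_includes_r above, disjointness from a union yields
   disjointness from each component:
   let disjoint_union_elim (l l1 l2:loc)
     : Lemma (requires (loc_disjoint l (loc_union l1 l2)))
             (ensures (loc_disjoint l l1 /\ loc_disjoint l l2))
     = ()  (* expected to follow from loc_disjoint_union_r' via its SMT pattern *)
*)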
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
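(* Illustrative sketch: the lemma above makes two non-overlapping slices of
   the same buffer disjoint, e.g. splitting ``b`` at index ``i``:
   let split_disjoint (#a:Type0) (#rrel #rel:srel a)
       (b:mbuffer a rrel rel) (i:U32.t{U32.v i <= length b})
     : Lemma (loc_disjoint (loc_buffer (mgsub rel b 0ul i))
                           (loc_buffer (mgsub rel b i (U32.sub (len b) i))))
     = ()  (* expected to follow from loc_disjoint_gsub_buffer via its SMT patterns *)
*)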
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
* Same as all_disjoint, retaining for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
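(* Illustrative sketch: these list-based abbreviations are typically used in
   specifications, e.g. a function over three buffers might require
     all_live h [buf b1; buf b2; buf b3] /\
     loc_pairwise_disjoint [loc_buffer b1; loc_buffer b2; loc_buffer b3]
   which is implicitly reduced at typechecking time to the individual
   liveness and pairwise-disjointness conjuncts. *)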
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if,
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
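(* Illustrative client-side sketch (not part of this interface): the
   elimination lemma above is what makes framing work in practice; a buffer
   disjoint from the modified footprint keeps its liveness and contents.
   let frame_buffer (#a:Type0) (#rrel #rel:srel a)
       (b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
     : Lemma (requires (live h b /\ loc_disjoint (loc_buffer b) p /\ modifies p h h'))
             (ensures (live h' b /\ as_seq h b == as_seq h' b))
     = ()  (* expected to be discharged by modifies_buffer_elim via its SMT patterns *)
*)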
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
/// If the memory state does not change, then ``modifies s h h`` holds for
/// any set ``s`` of memory locations (in particular, for the empty set
/// ``loc_none``).
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
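(* Illustrative sketch: modifies_loc_includes is the weakening principle used
   when composing code; a precise footprint can always be enlarged, e.g.:
   let widen (#a:Type0) (#rrel #rel:srel a)
       (b:mbuffer a rrel rel) (l:loc) (h h':HS.mem)
     : Lemma (requires (modifies (loc_buffer b) h h'))
             (ensures (modifies (loc_union l (loc_buffer b)) h h'))
     = ()  (* expected to follow from modifies_loc_includes together with the
              loc_includes_union_l patterns above *)
*)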
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
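(* Illustrative sketch: because buffer footprints are address-liveness-
   insensitive, modifying one buffer never deallocates another, even without
   any disjointness hypothesis:
   let live_preserved (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
       (b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) (h h':HS.mem)
     : Lemma (requires (live h b1 /\ modifies (loc_buffer b2) h h'))
             (ensures (live h' b1))
     = ()  (* expected to follow from modifies_liveness_insensitive_buffer_weak
              and address_liveness_insensitive_buffer via their SMT patterns *)
*)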
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x
/// Modifies clauses are transitive. This lemma is the most general
/// one.
val modifies_trans
(s12: loc)
(h1 h2: HS.mem)
(s23: loc)
(h3: HS.mem)
: Lemma
(requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
(ensures (modifies (loc_union s12 s23) h1 h3))
let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
= modifies_trans l h1 h2 l_goal h3
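(* Illustrative sketch: transitivity is what lets sequential composition
   accumulate footprints, e.g. two successive updates:
   let compose (l1 l2:loc) (h0 h1 h2:HS.mem)
     : Lemma (requires (modifies l1 h0 h1 /\ modifies l2 h1 h2))
             (ensures (modifies (loc_union l1 l2) h0 h2))
     = modifies_trans l1 h0 h1 l2 h2
*)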
/// Regions that are not live can be removed from sets of memory
/// locations that are modified.
val modifies_only_live_regions
(rs: Set.set HS.rid)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_regions false rs) l) h h' /\
(forall r . Set.mem r rs ==> (~ (HS.live_region h r)))
))
(ensures (modifies l h h'))
/// As a consequence, fresh regions can be removed from modifies
/// clauses.
val no_upd_fresh_region: r:HS.rid -> l:loc -> h0:HS.mem -> h1:HS.mem -> Lemma
(requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1))
(ensures (modifies l h0 h1))
[SMTPat (HS.fresh_region r h0 h1); SMTPat (modifies l h0 h1)]
val new_region_modifies (m0: HS.mem) (r0: HS.rid) (col: option int) : Lemma
(requires (HST.is_eternal_region r0 /\ HS.live_region m0 r0 /\ (None? col \/ HS.is_heap_color (Some?.v col))))
(ensures (
let (_, m1) = HS.new_eternal_region m0 r0 col in
modifies loc_none m0 m1
))
[SMTPat (HS.new_eternal_region m0 r0 col)]
/// Stack discipline: any stack frame (and all its transitively
/// extending regions) that is pushed, modified and popped can be
/// removed from a modifies clause.
/// AR: 01/29/2019: Removing the smt pattern from this lemma.
/// Clients are no longer expected to call it explicitly;
/// if you find that you need to, please raise an issue.
val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
))
/// Compatibility lemmas to rescue modifies clauses specified in the
/// standard F* HyperStack library.
val modifies_loc_regions_intro
(rs: Set.set HS.rid)
(h1 h2: HS.mem)
: Lemma
(requires (HS.modifies rs h1 h2))
(ensures (modifies (loc_regions true rs) h1 h2))
val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2))
/// Modifies clauses for allocating a reference: nothing is
/// modified. (In particular, a modifies clause does not track
/// memory locations that are created.)
val modifies_ralloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(i: HS.rid)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel)
(h' : HS.mem)
: Lemma
(requires (HST.ralloc_post i init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.ralloc_post i init h x h')]
val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')]
/// Modifies clause for freeing a reference: the address is modified.
val modifies_free
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel { HS.is_mm r } )
(m: HS.mem { m `HS.contains` r } )
: Lemma
(modifies (loc_freed_mreference r) m (HS.free r m))
[SMTPat (HS.free r m)]
/// Another compatibility lemma
val modifies_none_modifies
(h1 h2: HS.mem)
: Lemma
(requires (HST.modifies_none h1 h2))
(ensures (modifies loc_none h1 h2))
[SMTPat (HST.modifies_none h1 h2)]
/// Compatibility with HS.upd
val modifies_upd
(#t: Type) (#pre: Preorder.preorder t)
(r: HS.mreference t pre)
(v: t)
(h: HS.mem)
: Lemma
(requires (HS.contains h r))
(ensures (modifies (loc_mreference r) h (HS.upd h r v)))
[SMTPat (HS.upd h r v)]
/// Introduction lemma for modifying loc_buffer_from_to
val modifies_loc_buffer_from_to_intro
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
/// A memory ``h`` does not contain address ``a`` in region ``r``, denoted
/// ``does_not_contain_addr h (r, a)``, only if either region ``r`` is
/// not live, or address ``a`` is unused in region ``r``.
(* BEGIN TODO: move to FStar.Monotonic.HyperStack *)
val does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: GTot Type0
val not_live_region_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (~ (HS.live_region h (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val unused_in_does_not_contain_addr
(h: HS.mem)
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r)))
val addr_unused_in_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val free_does_not_contain_addr
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
HS.is_mm r /\
m `HS.contains` r /\
fst x == HS.frameOf r /\
snd x == HS.as_addr r
))
(ensures (
HS.free r m `does_not_contain_addr` x
))
[SMTPat (HS.free r m `does_not_contain_addr` x)]
val does_not_contain_addr_elim
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
m `does_not_contain_addr` x /\
HS.frameOf r == fst x /\
HS.as_addr r == snd x
))
(ensures (~ (m `HS.contains` r)))
(** END TODO *)
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_live_addresses
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_addresses false r a) l) h h' /\
(forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x))
))
(ensures (modifies l h h'))
(* Generic way to ensure that a buffer just allocated is disjoint from
any other object, however the latter's liveness is defined. *)
val loc_not_unused_in (h: HS.mem) : GTot loc
val loc_unused_in (h: HS.mem) : GTot loc
(* Shortcut notations with more handy names *)
let loc_in (l: loc) (h: HS.mem) =
loc_not_unused_in h `loc_includes` l
let loc_not_in (l: loc) (h: HS.mem) =
loc_unused_in h `loc_includes` l
val loc_regions_unused_in (h: HS.mem) (rs: Set.set HS.rid) : Lemma
(requires (forall r . Set.mem r rs ==> (~ (HS.live_region h r))))
(ensures (loc_unused_in h `loc_includes` loc_regions false rs))
val loc_unused_in_not_unused_in_disjoint (h: HS.mem) : Lemma
(loc_disjoint (loc_unused_in h) (loc_not_unused_in h))
val not_live_region_loc_not_unused_in_disjoint
(h0: HS.mem)
(r: HS.rid)
: Lemma
(requires (~ (HS.live_region h0 r)))
(ensures (loc_disjoint (loc_region_only false r) (loc_not_unused_in h0))) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | h0: FStar.Monotonic.HyperStack.mem -> h1: FStar.Monotonic.HyperStack.mem
-> FStar.Pervasives.Lemma (requires FStar.Monotonic.HyperStack.fresh_frame h0 h1)
(ensures
LowStar.Monotonic.Buffer.loc_disjoint (LowStar.Monotonic.Buffer.loc_region_only false
(FStar.Monotonic.HyperStack.get_tip h1))
(LowStar.Monotonic.Buffer.loc_not_unused_in h0))
[SMTPat (FStar.Monotonic.HyperStack.fresh_frame h0 h1)] | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"FStar.Monotonic.HyperStack.mem",
"LowStar.Monotonic.Buffer.not_live_region_loc_not_unused_in_disjoint",
"FStar.Monotonic.HyperStack.get_tip",
"Prims.unit",
"FStar.Monotonic.HyperStack.fresh_frame",
"Prims.squash",
"LowStar.Monotonic.Buffer.loc_disjoint",
"LowStar.Monotonic.Buffer.loc_region_only",
"LowStar.Monotonic.Buffer.loc_not_unused_in",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat",
"Prims.logical",
"Prims.Nil"
] | [] | true | false | true | false | false | let fresh_frame_loc_not_unused_in_disjoint (h0 h1: HS.mem)
: Lemma (requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)] =
| not_live_region_loc_not_unused_in_disjoint h0 (HS.get_tip h1) | false |
LowStar.Monotonic.Buffer.fsti | LowStar.Monotonic.Buffer.abuffer | val abuffer (region: HS.rid) (addr: nat) : Tot Type0 | val abuffer (region: HS.rid) (addr: nat) : Tot Type0 | let abuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (abuffer' region addr) | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 86,
"end_line": 2356,
"start_col": 0,
"start_line": 2356
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` return the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further the clients can attach a preorder with the subbuffer (sub_rel),
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
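(* Illustrative client-side sketch (ghost code, not part of this interface):
   carving out the first ``i`` elements of ``b`` under a caller-chosen
   sub-preorder ``sub_rel``:
   let prefix (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
       (b:mbuffer a rrel rel) (i:U32.t{U32.v i <= length b})
     : GTot (mbuffer a rrel sub_rel)
     = mgsub sub_rel b 0ul i
   For the resulting view to be usable in stateful code, the caller is also
   expected to establish ``compatible_sub b 0ul i sub_rel``. *)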
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers having the same region and address have
/// their elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
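(* Illustrative sketch: the unit and idempotence laws above keep footprints in
   a normal form, e.g. a redundant ``loc_none`` can always be dropped:
   let drop_none (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
     : Lemma (loc_union (loc_buffer b) loc_none == loc_buffer b)
     = ()  (* expected to be discharged by loc_union_loc_none_r via its SMT pattern *)
*)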
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then so are their
/// corresponding sets of memory locations.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then the set of memory locations corresponding
/// to ``s1`` includes the one corresponding to ``s2``.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then the set of memory locations corresponding to ``s1`` includes
/// the set corresponding to ``s2``.
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
 * Same as all_disjoint, retained for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
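(* Illustration only, not part of this interface: a hypothetical specification
   over three buffers might state its liveness and disjointness preconditions
   with the list-based utilities above; footprint_pre and b0, b1, b2 are names
   assumed for this sketch.

let footprint_pre (#a:Type0) (#rrel #rel:srel a)
  (h:HS.mem) (b0 b1 b2:mbuffer a rrel rel) : Type0
= all_live h [buf b0; buf b1; buf b2] /\
  all_disjoint [loc_buffer b0; loc_buffer b1; loc_buffer b2]
*)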
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
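(* Illustration only, not part of this interface: the elimination lemmas above
   give the usual framing principle -- a buffer disjoint from the modified
   footprint keeps its liveness and contents. The lemma below is a hypothetical
   sketch, expected to be discharged automatically by the SMT patterns of
   modifies_buffer_elim.

let frame_buffer (#a:Type0) (#rrel #rel:srel a)
  (b1 b2:mbuffer a rrel rel) (h0 h1:HS.mem)
  : Lemma (requires (live h0 b2 /\
                     loc_disjoint (loc_buffer b1) (loc_buffer b2) /\
                     modifies (loc_buffer b1) h0 h1))
          (ensures  (live h1 b2 /\ as_seq h1 b2 == as_seq h0 b2))
  = ()
*)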
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
/// If the memory state does not change, then any set of memory locations is
/// (trivially) modified (in particular, the empty set ``loc_none``).
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x
/// Modifies clauses are transitive. This lemma is the most general
/// one.
val modifies_trans
(s12: loc)
(h1 h2: HS.mem)
(s23: loc)
(h3: HS.mem)
: Lemma
(requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
(ensures (modifies (loc_union s12 s23) h1 h3))
let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
= modifies_trans l h1 h2 l_goal h3
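(* Illustration only, not part of this interface: two successive modifications
   combine into a single modifies clause over the union of their footprints.
   The lemma below is a hypothetical sketch built directly on modifies_trans.

let modifies_seq (l1 l2:loc) (h0 h1 h2:HS.mem)
  : Lemma (requires (modifies l1 h0 h1 /\ modifies l2 h1 h2))
          (ensures  (modifies (loc_union l1 l2) h0 h2))
  = modifies_trans l1 h0 h1 l2 h2
*)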
/// Regions that are not live can be removed from sets of memory
/// locations that are modified.
val modifies_only_live_regions
(rs: Set.set HS.rid)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_regions false rs) l) h h' /\
(forall r . Set.mem r rs ==> (~ (HS.live_region h r)))
))
(ensures (modifies l h h'))
/// As a consequence, fresh regions can be removed from modifies
/// clauses.
val no_upd_fresh_region: r:HS.rid -> l:loc -> h0:HS.mem -> h1:HS.mem -> Lemma
(requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1))
(ensures (modifies l h0 h1))
[SMTPat (HS.fresh_region r h0 h1); SMTPat (modifies l h0 h1)]
val new_region_modifies (m0: HS.mem) (r0: HS.rid) (col: option int) : Lemma
(requires (HST.is_eternal_region r0 /\ HS.live_region m0 r0 /\ (None? col \/ HS.is_heap_color (Some?.v col))))
(ensures (
let (_, m1) = HS.new_eternal_region m0 r0 col in
modifies loc_none m0 m1
))
[SMTPat (HS.new_eternal_region m0 r0 col)]
/// Stack discipline: any stack frame (and all its transitively
/// extending regions) that is pushed, modified and popped can be
/// removed from a modifies clause.
/// AR: 01/29/2019: Removing the SMT pattern from this lemma.
/// Clients are no longer expected to call it explicitly;
/// if you find yourself having to, please raise an issue.
val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
))
/// Compatibility lemmas to rescue modifies clauses specified in the
/// standard F* HyperStack library.
val modifies_loc_regions_intro
(rs: Set.set HS.rid)
(h1 h2: HS.mem)
: Lemma
(requires (HS.modifies rs h1 h2))
(ensures (modifies (loc_regions true rs) h1 h2))
val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2))
/// Modifies clauses for allocating a reference: nothing is
/// modified. (In particular, a modifies clause does not track
/// memory locations that are created.)
val modifies_ralloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(i: HS.rid)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel)
(h' : HS.mem)
: Lemma
(requires (HST.ralloc_post i init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.ralloc_post i init h x h')]
val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')]
/// Modifies clause for freeing a reference: the address is modified.
val modifies_free
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel { HS.is_mm r } )
(m: HS.mem { m `HS.contains` r } )
: Lemma
(modifies (loc_freed_mreference r) m (HS.free r m))
[SMTPat (HS.free r m)]
/// Another compatibility lemma
val modifies_none_modifies
(h1 h2: HS.mem)
: Lemma
(requires (HST.modifies_none h1 h2))
(ensures (modifies loc_none h1 h2))
[SMTPat (HST.modifies_none h1 h2)]
/// Compatibility with HS.upd
val modifies_upd
(#t: Type) (#pre: Preorder.preorder t)
(r: HS.mreference t pre)
(v: t)
(h: HS.mem)
: Lemma
(requires (HS.contains h r))
(ensures (modifies (loc_mreference r) h (HS.upd h r v)))
[SMTPat (HS.upd h r v)]
/// Introduction lemma for modifying loc_buffer_from_to
val modifies_loc_buffer_from_to_intro
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
/// A memory ``h`` does not contain address ``a`` in region ``r``, denoted
/// ``does_not_contain_addr h (r, a)``, only if either region ``r`` is
/// not live, or address ``a`` is unused in region ``r``.
(* BEGIN TODO: move to FStar.Monotonic.HyperStack *)
val does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: GTot Type0
val not_live_region_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (~ (HS.live_region h (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val unused_in_does_not_contain_addr
(h: HS.mem)
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r)))
val addr_unused_in_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val free_does_not_contain_addr
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
HS.is_mm r /\
m `HS.contains` r /\
fst x == HS.frameOf r /\
snd x == HS.as_addr r
))
(ensures (
HS.free r m `does_not_contain_addr` x
))
[SMTPat (HS.free r m `does_not_contain_addr` x)]
val does_not_contain_addr_elim
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
m `does_not_contain_addr` x /\
HS.frameOf r == fst x /\
HS.as_addr r == snd x
))
(ensures (~ (m `HS.contains` r)))
(** END TODO *)
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_live_addresses
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_addresses false r a) l) h h' /\
(forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x))
))
(ensures (modifies l h h'))
(* Generic way to ensure that a freshly allocated buffer is disjoint from
   any other object, regardless of how the latter's liveness is defined. *)
val loc_not_unused_in (h: HS.mem) : GTot loc
val loc_unused_in (h: HS.mem) : GTot loc
(* Shortcut notations with more handy names *)
let loc_in (l: loc) (h: HS.mem) =
loc_not_unused_in h `loc_includes` l
let loc_not_in (l: loc) (h: HS.mem) =
loc_unused_in h `loc_includes` l
val loc_regions_unused_in (h: HS.mem) (rs: Set.set HS.rid) : Lemma
(requires (forall r . Set.mem r rs ==> (~ (HS.live_region h r))))
(ensures (loc_unused_in h `loc_includes` loc_regions false rs))
val loc_unused_in_not_unused_in_disjoint (h: HS.mem) : Lemma
(loc_disjoint (loc_unused_in h) (loc_not_unused_in h))
val not_live_region_loc_not_unused_in_disjoint
(h0: HS.mem)
(r: HS.rid)
: Lemma
(requires (~ (HS.live_region h0 r)))
(ensures (loc_disjoint (loc_region_only false r) (loc_not_unused_in h0)))
let fresh_frame_loc_not_unused_in_disjoint
(h0 h1: HS.mem)
: Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)]
= not_live_region_loc_not_unused_in_disjoint h0 (HS.get_tip h1)
val live_loc_not_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b))
(ensures (loc_not_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (live h b)]
val unused_in_loc_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (unused_in b h))
(ensures (loc_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (unused_in b h)]
val modifies_address_liveness_insensitive_unused_in
(h h' : HS.mem)
: Lemma
(requires (modifies (address_liveness_insensitive_locs) h h'))
(ensures (loc_not_unused_in h' `loc_includes` loc_not_unused_in h /\ loc_unused_in h `loc_includes` loc_unused_in h'))
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_not_unused_in
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (modifies (loc_union (loc_unused_in h) l) h h'))
(ensures (modifies l h h'))
val mreference_live_loc_not_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (h `HS.contains` r))
(ensures (loc_not_unused_in h `loc_includes` loc_freed_mreference r /\ loc_not_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.contains h r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_freed_mreference r)];
]]
val mreference_unused_in_loc_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (loc_unused_in h `loc_includes` loc_freed_mreference r /\ loc_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.unused_in r h)];
[SMTPat (loc_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_unused_in h `loc_includes` loc_freed_mreference r)];
]]
let unused_in_not_unused_in_disjoint_2
(l1 l2 l1' l2': loc)
(h: HS.mem)
: Lemma
(requires (loc_unused_in h `loc_includes` l1 /\ loc_not_unused_in h `loc_includes` l2 /\ l1 `loc_includes` l1' /\ l2 `loc_includes` l2' ))
(ensures (loc_disjoint l1' l2' ))
[SMTPat (loc_disjoint l1' l2'); SMTPat (loc_unused_in h `loc_includes` l1); SMTPat (loc_not_unused_in h `loc_includes` l2)]
= loc_includes_trans (loc_unused_in h) l1 l1' ;
loc_includes_trans (loc_not_unused_in h) l2 l2' ;
loc_unused_in_not_unused_in_disjoint h ;
loc_disjoint_includes (loc_unused_in h) (loc_not_unused_in h) l1' l2'
val modifies_loc_unused_in
(l: loc)
(h1 h2: HS.mem)
(l' : loc)
: Lemma
(requires (
modifies l h1 h2 /\
address_liveness_insensitive_locs `loc_includes` l /\
loc_unused_in h2 `loc_includes` l'
))
(ensures (loc_unused_in h1 `loc_includes` l'))
[SMTPatOr [
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h2 `loc_includes` l')];
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h1 `loc_includes` l')];
]]
/// Shorthand: freshness
let fresh_loc (l: loc) (h h' : HS.mem) : GTot Type0 =
loc_unused_in h `loc_includes` l /\
loc_not_unused_in h' `loc_includes` l
let ralloc_post_fresh_loc (#a:Type) (#rel:Preorder.preorder a) (i: HS.rid) (init:a) (m0: HS.mem)
(x: HST.mreference a rel{HST.is_eternal_region (HS.frameOf x)}) (m1: HS.mem) : Lemma
(requires (HST.ralloc_post i init m0 x m1))
(ensures (fresh_loc (loc_freed_mreference x) m0 m1))
[SMTPat (HST.ralloc_post i init m0 x m1)]
= ()
//AR: this is needed for liveness across fresh_frame
val fresh_frame_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (modifies loc_none h0 h1))
[SMTPat (HS.fresh_frame h0 h1)]
val popped_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.popped h0 h1))
(ensures (modifies (loc_region_only false (HS.get_tip h0)) h0 h1))
[SMTPat (HS.popped h0 h1)]
val modifies_remove_new_locs (l_fresh l_aux l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (fresh_loc l_fresh h1 h2 /\
modifies l_aux h1 h2 /\
l_goal `loc_includes` l_aux /\
modifies (loc_union l_fresh l_goal) h2 h3))
(ensures (modifies l_goal h1 h3))
[SMTPat (fresh_loc l_fresh h1 h2);
SMTPat (modifies l_aux h1 h2);
SMTPat (modifies l_goal h1 h3)]
(*
 * AR: this lemma frames the modifies clause across a fresh frame.
 * One way to do it would have been to reuse the lemma modifies_remove_new_locs,
 * treating the fresh frame as another new location.
 * However, the way the library is set up, loc_region in any form cannot be
 * considered a fresh loc, so we have a special lemma for fresh_frame.
 *)
let modifies_remove_fresh_frame (h1 h2 h3:HS.mem) (l:loc)
: Lemma (requires (HS.fresh_frame h1 h2 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h2)) l) h2 h3))
(ensures (modifies l h1 h3))
[SMTPat (modifies l h1 h3); SMTPat (HS.fresh_frame h1 h2)]
= loc_regions_unused_in h1 (HS.mod_set (Set.singleton (HS.get_tip h2)));
modifies_only_not_unused_in l h1 h3
/// Legacy shorthands for disjointness and inclusion of buffers
///
let disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_disjoint (loc_buffer b1) (loc_buffer b2)
let includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_includes (loc_buffer b1) (loc_buffer b2) /\
(g_is_null b1 <==> g_is_null b2)
val disjoint_neq (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (disjoint b1 b2 /\ U32.v (len b1) > 0))
(ensures (~(b1 === b2)))
val empty_disjoint (#t1 #t2: Type) (#rrel1 #rel1: srel t1) (#rrel2 #rel2: srel t2) (b1: mbuffer t1 rrel1 rel1) (b2: mbuffer t2 rrel2 rel2) : Lemma
(requires (length b1 == 0))
(ensures (disjoint b1 b2))
(*
/// The liveness of a sub-buffer entails from the liveness
/// of its enclosing buffer.
val includes_live (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (live h larger ==> live h smaller))
[SMTPatOr [
[SMTPat (includes larger smaller); SMTPat (live h larger)];
[SMTPat (includes larger smaller); SMTPat (live h smaller)];
]]
*)
val includes_frameOf_as_addr (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (g_is_null larger == g_is_null smaller /\ frameOf larger == frameOf smaller /\ as_addr larger == as_addr smaller))
[SMTPat (larger `includes` smaller)]
///
/// Useful shorthands for pointers, or maybe-null pointers
inline_for_extraction
type mpointer (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{length b == 1}
inline_for_extraction
type mpointer_or_null (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{if g_is_null b then True else length b == 1}
unfold
let deref (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (x:mpointer a rrel rel) =
get h x 0
/// Two pointers having different contents are disjoint. This is valid
/// only for pointers, i.e. buffers of size 1.
val pointer_distinct_sel_disjoint
(#a:Type0) (#rrel1 #rrel2 #rel1 #rel2:srel a)
(b1:mpointer a rrel1 rel1)
(b2:mpointer a rrel2 rel2)
(h:HS.mem)
:Lemma (requires (live h b1 /\ live h b2 /\ get h b1 0 =!= get h b2 0))
(ensures (disjoint b1 b2))
/// The following stateful operations on buffers do not change the
/// memory, but, as required by the C standard, they all require the
/// buffer in question to be live.
/// The nullity test, ``is_null b``, which KaRaMeL compiles to C as ``b == NULL``.
val is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack bool (requires (fun h -> live h b))
(ensures (fun h y h' -> h == h' /\ y == g_is_null b))
/// ``sub b i len`` constructs the sub-buffer of ``b`` starting from
/// offset ``i`` with length ``len``. KaRaMeL extracts this operation as
/// ``b + i`` (or, equivalently, ``&b[i]``.)
val msub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t) (len:Ghost.erased U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i + U32.v (Ghost.reveal len) <= length b /\ compatible_sub b i (Ghost.reveal len) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (Ghost.reveal len)))
/// ``offset b i`` constructs the tail of the buffer ``b`` starting from
/// offset ``i``, i.e. the sub-buffer of ``b`` starting from offset ``i``
/// with length ``U32.sub (len b) i``. KaRaMeL compiles it as ``b + i`` or
/// ``&b[i]``.
///
/// This stateful operation cannot be derived from ``sub``, because the
/// length cannot be computed outside of proofs.
val moffset (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i <= length b /\ compatible_sub b i (U32.sub (len b) i) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (U32.sub (len b) i)))
// goffset
/// ``index b i`` reads the value of ``b`` at offset ``i`` from memory and
/// returns it. KaRaMeL compiles it as ``b[i]``.
val index (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (i:U32.t)
:HST.Stack a (requires (fun h -> live h b /\ U32.v i < length b))
(ensures (fun h y h' -> h == h' /\ y == Seq.index (as_seq h b) (U32.v i)))
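(* Illustration only, not part of this interface: a hypothetical reader of the
   first cell of a non-empty, live buffer, specified directly in terms of the
   underlying sequence.

let read_head (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
  : HST.Stack a
    (requires (fun h -> live h b /\ length b > 0))
    (ensures  (fun h y h' -> h == h' /\ y == Seq.index (as_seq h b) 0))
  = index b 0ul
*)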
/// The following stateful operations on buffers modify the memory,
/// and, as usual, require the liveness of the buffer.
/// ``g_upd_seq b s h`` updates the entire buffer `b`'s contents in
/// heap `h` to correspond to the sequence `s`.
val g_upd_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (s:Seq.lseq a (length b))
(h:HS.mem{live h b})
:GTot HS.mem
val lemma_g_upd_with_same_seq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b)) (ensures (g_upd_seq b (as_seq h b) h == h))
/// A lemma specifying `g_upd_seq` in terms of its effect on the
/// buffer's underlying sequence
val g_upd_seq_as_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(s:Seq.lseq a (length b))
(h:HS.mem{live h b})
: Lemma (let h' = g_upd_seq b s h in
(Seq.length s > 0 ==> not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
HST.equal_domains h h' /\
as_seq h' b == s)
/// ``g_upd b i v h`` updates the buffer `b` in heap `h` at location
/// `i` writing ``v`` there. This is the spec analog of the stateful
/// update `upd` below.
let g_upd (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: GTot HS.mem
= g_upd_seq b (Seq.upd (as_seq h b) i v) h
val g_upd_modifies_strong (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: Lemma (modifies (loc_buffer_from_to b (U32.uint_to_t i) (U32.uint_to_t (i + 1))) h (g_upd b i v h))
/// ``upd b i v`` writes ``v`` to the memory, at offset ``i`` of
/// buffer ``b``. KaRaMeL compiles it as ``b[i] = v``.
val upd'
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
:HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> h' == g_upd b (U32.v i) v h))
inline_for_extraction
let upd
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
: HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> (not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
as_seq h' b == Seq.upd (as_seq h b) (U32.v i) v))
= let h = HST.get () in
upd' b i v;
g_upd_seq_as_seq b (Seq.upd (as_seq h b) (U32.v i) v) h
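(* Illustration only, not part of this interface: a hypothetical writer of the
   first cell of a buffer. The side condition on rel is required by upd; with a
   trivial preorder (e.g. the one provided by LowStar.Buffer, mentioned here
   only as an assumption) it holds vacuously.

let write_head (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (v:a)
  : HST.Stack unit
    (requires (fun h -> live h b /\ length b > 0 /\
                     rel (as_seq h b) (Seq.upd (as_seq h b) 0 v)))
    (ensures  (fun h _ h' -> modifies (loc_buffer b) h h' /\
                          live h' b /\
                          as_seq h' b == Seq.upd (as_seq h b) 0 v))
  = upd b 0ul v
*)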
(* FIXME: Comment on `recall` *)
val recallable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val region_lifetime_buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Type0
(*
* A functoriality lemma
*)
unfold
let rrel_rel_always_compatible (#a:Type0) (rrel rel:srel a) =
forall (len:nat) (i:nat) (j:nat{i <= j /\ j <= len}). compatible_subseq_preorder len rrel i j rel
val region_lifetime_sub (#a:Type0) (#rrel #rel #subrel:srel a)
(b0:mbuffer a rrel rel)
(b1:mbuffer a rrel subrel)
: Lemma
(requires rrel_rel_always_compatible rrel subrel)
(ensures
(region_lifetime_buf b0 /\
(exists i len. U32.v i + U32.v len <= length b0 /\ b1 == mgsub subrel b0 i len)) ==> region_lifetime_buf b1)
val recallable_null (#a:Type0) (#rrel #rel:srel a)
:Lemma (recallable (mnull #a #rrel #rel)) [SMTPat (recallable (mnull #a #rrel #rel))]
(*
val recallable_includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (recallable larger <==> recallable smaller))
[SMTPatOr [
[SMTPat (recallable larger); SMTPat (recallable smaller);];
[SMTPat (larger `includes` smaller)];
]]
*)
val recallable_mgsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel /\ recallable b))
(ensures (recallable (mgsub sub_rel b i len)))
[SMTPatOr [
[SMTPat (recallable (mgsub sub_rel b i len))];
[SMTPat (recallable b); SMTPat (mgsub sub_rel b i len);]
]]
val recall (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack unit (requires (fun m -> recallable b \/ (region_lifetime_buf b /\ HS.live_region m (frameOf b))))
(ensures (fun m0 _ m1 -> m0 == m1 /\ live m1 b))
(*
* Begin: API for general witness and recall
* Clients can witness predicates on the contents of the buffer, and later recall them
* Provided the predicates are stable w.r.t. the buffer preorder
*)
(* Shorthand for predicates of Seq.seq a *)
unfold let spred (a:Type0) = Seq.seq a -> Type0
(*
 * Note the tight patterns on the quantifier; you may need to write additional triggers
 * if you are working with them directly.
*)
unfold let stable_on (#a:Type0) (p:spred a) (rel:srel a) =
forall (s1 s2:Seq.seq a).{:pattern (p s1); (rel s1 s2); (p s2)} (p s1 /\ rel s1 s2) ==> p s2
(* Clients get this pure token when they witness a predicate *)
val witnessed (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a) :Type0
(*
 * We can only support witness and recall for gc-malloc'd buffers (i.e. recallable ones).
 * This is not a fundamental limitation, but it needs some tweaks to the underlying state model.
*)
val witness_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.ST unit (requires (fun h0 -> p (as_seq h0 b) /\ p `stable_on` rel))
(ensures (fun h0 _ h1 -> h0 == h1 /\ b `witnessed` p))
val recall_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.ST unit (requires (fun h0 -> (recallable b \/ live h0 b) /\ b `witnessed` p))
(ensures (fun h0 _ h1 -> h0 == h1 /\ live h0 b /\ p (as_seq h0 b)))
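(* Illustration only, not part of this interface: the witness/recall idiom for
   a predicate that is stable under the buffer's preorder. witness_then_recall
   is a hypothetical helper; for recallable buffers, the recalled fact does not
   require carrying a liveness hypothesis around.

let witness_then_recall (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel) (p:spred a)
  : HST.ST unit
    (requires (fun h -> p (as_seq h b) /\ p `stable_on` rel /\ recallable b))
    (ensures  (fun h0 _ h1 -> h0 == h1 /\ p (as_seq h1 b)))
  = witness_p b p;
    recall_p b p
*)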
val witnessed_functorial (#a:Type0)
(#rrel #rel1 #rel2:srel a)
(b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t)
(s1 s2:spred a)
: Lemma
(requires
rrel_rel_always_compatible rrel rel1 /\ //e.g. trivial_preorder, immutable preorder etc.
U32.v i + U32.v len <= length b1 /\
b2 == mgsub rel2 b1 i len /\ //the underlying allocation unit for b1 and b2 must be the same
witnessed b1 s1 /\
(forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2)))
(ensures witnessed b2 s2)
(*
* A stateful version that relaxes the rrel and rel compatibility
* but requires liveness of b1
*)
val witnessed_functorial_st (#a:Type0)
(#rrel #rel1 #rel2:srel a)
(b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t)
(s1 s2:spred a)
: HST.Stack unit
(requires fun h ->
live h b1 /\
U32.v i + U32.v len <= length b1 /\
b2 == mgsub rel2 b1 i len /\
witnessed b1 s1 /\
(forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2)))
(ensures fun h0 _ h1 -> h0 == h1 /\ witnessed b2 s2)
(* End: API for general witness and recall *)
/// Deallocation. A buffer that was allocated by ``malloc`` (see below)
/// can be freed with ``free``.
val freeable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val free (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.ST unit (requires (fun h0 -> live h0 b /\ freeable b))
(ensures (fun h0 _ h1 -> (not (g_is_null b)) /\
Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\
(HS.get_tip h1) == (HS.get_tip h0) /\
modifies (loc_addr_of_buffer b) h0 h1 /\
HS.live_region h1 (frameOf b)))
val freeable_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (freeable b)) (ensures (length b > 0))
[SMTPat (freeable b)]
val freeable_disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (frameOf b1 <> frameOf b2 \/ as_addr b1 <> as_addr b2))
let freeable_disjoint' (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (loc_disjoint (loc_addr_of_buffer b1) (loc_addr_of_buffer b2)))
[SMTPat (freeable b1); SMTPat (disjoint b1 b2)]
= freeable_disjoint b1 b2
(***** Begin allocation functions *****)
/// Allocation. This is the common postcondition of all allocation
/// operators, which tells that the resulting buffer is fresh, and
/// specifies its initial contents.
(*
* Allocation functions:
* In the return type, we try to give heap-independent postconditions (such as length)
* in the refinement of the buffer type (for the usage pattern of top-level buffers)
* while heap dependent postconditions are provided in the ensures clause
*
* One unsatisfying aspect is that these functions are duplicated in the wrappers that we write
* (e.g. Buffer, ImmutableBuffer, etc.)
* If we don't duplicate, then the clients may face type inference issues (for preorders)
*
* So, if you change any of the pre- or postcondition, you should change the pre and post spec functions
* (such as alloc_post_mem_common etc.), rather than the specs directly
* Perhaps we can rely on F* type inference and not write specs explicitly in those wrappers?
* Will try that
*
* For memory dependent post, alloc_post_mem_common is the one used by everyone
*
* For heap allocations, the library also provides partial functions that could return null
* Clients need to explicitly check for non-null values when using these functions
* Partial function specs use alloc_partial_post_mem_common
*
* NOTE: a useful test for the implementation of partial functions is that
* their spec should be valid even when their implementation just returns null
*)
unfold let lmbuffer (a:Type0) (rrel rel:srel a) (len:nat)
= b:mbuffer a rrel rel{length b == len /\ not (g_is_null b)}
unfold
let alloc_post_mem_common (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h0 h1:HS.mem) (s:Seq.seq a)
= live h1 b /\
unused_in b h0 /\
Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\
(HS.get_tip h1) == (HS.get_tip h0) /\
modifies loc_none h0 h1 /\
as_seq h1 b == s
(* Return type and post for partial allocation functions *)
unfold let lmbuffer_or_null (a:Type0) (rrel rel:srel a) (len:nat) (r:HS.rid)
= b:mbuffer a rrel rel{(not (g_is_null b)) ==> (length b == len /\ frameOf b == r)}
unfold let alloc_partial_post_mem_common (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h0 h1:HS.mem) (s:Seq.seq a)
= (g_is_null b /\ h0 == h1) \/
((not (g_is_null b)) /\ alloc_post_mem_common b h0 h1 s)
unfold let malloc_pre (r:HS.rid) (len:U32.t) = HST.is_eternal_region r /\ U32.v len > 0
/// ``gcmalloc r init len`` allocates a memory-managed buffer of some
/// positive length ``len`` in an eternal region ``r``. Every cell of this
/// buffer will have initial contents ``init``. Such a buffer cannot be
/// freed. In fact, it is eternal: it cannot be deallocated at all.
(*
* See the Allocation comment above when changing the spec
*)
val mgcmalloc (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ recallable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
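(* Illustration only, not part of this interface: a hypothetical top-level
   allocation of 64 cells in HS.root, assuming the usual fact that HS.root is
   an eternal region. The size and region are chosen purely for this sketch.

let alloc_global (#a:Type0) (#rrel:srel a) (init:a)
  : HST.ST (b:lmbuffer a rrel rrel 64{frameOf b == HS.root /\ recallable b})
    (requires (fun _ -> True))
    (ensures  (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create 64 init)))
  = mgcmalloc HS.root init 64ul
*)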
(*
* Allocate a memory-managed buffer initialized with contents from src
*
* This allocates and initializes the buffer atomically (from the perspective of the Low* clients)
*)
val mgcmalloc_and_blit (#a:Type0) (#rrel:srel a) (r:HS.rid)
(#rrel1 #rel1:srel a) (src:mbuffer a rrel1 rel1) (id_src:U32.t) (len:U32.t)
: HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ recallable b})
(requires fun h0 ->
malloc_pre r len /\
live h0 src /\ U32.v id_src + U32.v len <= length src)
(ensures fun h0 b h1 ->
alloc_post_mem_common b h0 h1
(Seq.slice (as_seq h0 src) (U32.v id_src) (U32.v id_src + U32.v len)))
(*
* See the Allocation comment above when changing the spec
*)
inline_for_extraction
let mgcmalloc_partial (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer_or_null a rrel rrel (U32.v len) r{recallable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
= mgcmalloc r init len
/// ``malloc r init len`` allocates a hand-managed buffer of some
/// positive length ``len`` in an eternal region ``r``. Every cell of this
/// buffer will have initial contents ``init``. Such a buffer can be
/// freed using ``free`` above. Note that the ``freeable`` permission is
/// only on the whole buffer ``b``, and is not inherited by any of its
/// strict sub-buffers.
(*
* See the Allocation comment above when changing the spec
*)
val mmalloc (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
(*
* Allocate a hand-managed buffer initialized with contents from src
*
* This allocates and initializes the buffer atomically (from the perspective of the Low* clients)
*)
val mmalloc_and_blit (#a:Type0) (#rrel:srel a) (r:HS.rid)
(#rrel1 #rel1:srel a) (src:mbuffer a rrel1 rel1) (id_src:U32.t) (len:U32.t)
: HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ freeable b})
(requires fun h0 ->
malloc_pre r len /\
live h0 src /\ U32.v id_src + U32.v len <= length src)
(ensures fun h0 b h1 ->
alloc_post_mem_common b h0 h1
(Seq.slice (as_seq h0 src) (U32.v id_src) (U32.v id_src + U32.v len)))
(*
* See the Allocation comment above when changing the spec
*)
inline_for_extraction
let mmalloc_partial (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer_or_null a rrel rrel (U32.v len) r{(not (g_is_null b)) ==> freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
= mmalloc r init len
/// ``alloca init len`` allocates a buffer of some positive length ``len``
/// in the current stack frame. Every cell of this buffer will have
/// initial contents ``init``. Such a buffer cannot be freed
/// individually, but is automatically freed as soon as its stack
/// frame is deallocated by ``HST.pop_frame``.
unfold let alloca_pre (len:U32.t) = U32.v len > 0
(*
* See the Allocation comment above when changing the spec
*)
val malloca (#a:Type0) (#rrel:srel a)
(init:a) (len:U32.t)
:HST.StackInline (lmbuffer a rrel rrel (U32.v len))
(requires (fun _ -> alloca_pre len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init) /\
frameOf b == HS.get_tip h0))
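(* Illustration only, not part of this interface: the usual stack discipline is
   to push a frame, allocate with malloca, use the buffer, and pop the frame;
   the modifies lemmas above then erase the temporary allocation from the
   caller's point of view. with_scratch is a hypothetical sketch.

let with_scratch (#a:Type0) (#rrel:srel a) (init:a)
  : HST.Stack a
    (requires (fun _ -> True))
    (ensures  (fun h0 _ h1 -> modifies loc_none h0 h1))
  = HST.push_frame ();
    let tmp = malloca #a #rrel init 4ul in
    let r = index tmp 0ul in
    HST.pop_frame ();
    r
*)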
(*
* Allocate a stack buffer initialized with contents from src
*
* This allocates and initializes the buffer atomically (from the perspective of the Low* clients)
*)
val malloca_and_blit (#a:Type0) (#rrel:srel a)
(#rrel1 #rel1:srel a) (src:mbuffer a rrel1 rel1) (id_src:U32.t) (len:U32.t)
: HST.StackInline (lmbuffer a rrel rrel (U32.v len))
(requires fun h0 ->
alloca_pre len /\
live h0 src /\ U32.v id_src + U32.v len <= length src)
(ensures fun h0 b h1 ->
alloc_post_mem_common b h0 h1
(Seq.slice (as_seq h0 src) (U32.v id_src) (U32.v id_src + U32.v len)) /\
frameOf b == HS.get_tip h0)
/// ``alloca_of_list init`` allocates a buffer in the current stack
/// frame. The initial values of the cells of this buffer are
/// specified by the ``init`` list, which must be nonempty, and of
/// length representable as a machine integer.
unfold let alloca_of_list_pre (#a:Type0) (init:list a) =
normalize (0 < FStar.List.Tot.length init) /\
normalize (FStar.List.Tot.length init <= UInt.max_int 32)
(*
* See the Allocation comment above when changing the spec
*)
val malloca_of_list (#a:Type0) (#rrel:srel a) (init: list a)
:HST.StackInline (lmbuffer a rrel rrel (normalize_term (List.Tot.length init)))
(requires (fun _ -> alloca_of_list_pre init))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.seq_of_list init) /\
frameOf b == HS.get_tip h0))
unfold let gcmalloc_of_list_pre (#a:Type0) (r:HS.rid) (init:list a) =
HST.is_eternal_region r /\
normalize (FStar.List.Tot.length init <= UInt.max_int 32)
(*
* See the Allocation comment above when changing the spec
*)
val mgcmalloc_of_list (#a:Type0) (#rrel:srel a) (r:HS.rid) (init:list a)
:HST.ST (b:lmbuffer a rrel rrel (normalize_term (List.Tot.length init)){frameOf b == r /\ recallable b})
(requires (fun _ -> gcmalloc_of_list_pre r init))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.seq_of_list init)))
(*
* See the Allocation comment above when changing the spec
*)
inline_for_extraction
let mgcmalloc_of_list_partial (#a:Type0) (#rrel:srel a) (r:HS.rid) (init:list a)
:HST.ST (b:lmbuffer_or_null a rrel rrel (normalize_term (List.Tot.length init)) r{recallable b})
(requires (fun _ -> gcmalloc_of_list_pre r init))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.seq_of_list init)))
= mgcmalloc_of_list r init
unfold let alloc_drgn_pre (h:HS.mem) (d:HST.drgn) (len:U32.t) = h `HS.live_region` (HST.rid_of_drgn d) /\ U32.v len > 0
val mmalloc_drgn (#a:Type0) (#rrel:srel a)
(d:HST.drgn) (init:a) (len:U32.t)
: HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == HST.rid_of_drgn d /\ region_lifetime_buf b})
(requires fun h -> alloc_drgn_pre h d len)
(ensures fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init))
val mmalloc_drgn_mm (#a:Type0) (#rrel:srel a)
(d:HST.drgn) (init:a) (len:U32.t)
: HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == HST.rid_of_drgn d /\ freeable b})
(requires fun h -> alloc_drgn_pre h d len)
(ensures fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init))
val mmalloc_drgn_and_blit (#a:Type0) (#rrel:srel a)
(#rrel1 #rel1:srel a)
(d:HST.drgn) (src:mbuffer a rrel1 rel1) (id_src:U32.t) (len:U32.t)
: HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == HST.rid_of_drgn d /\ region_lifetime_buf b})
(requires fun h ->
alloc_drgn_pre h d len /\
live h src /\
U32.v id_src + U32.v len <= length src)
(ensures fun h0 b h1 ->
alloc_post_mem_common b h0 h1
(Seq.slice (as_seq h0 src) (U32.v id_src) (U32.v id_src + U32.v len)))
(***** End allocation functions *****)
/// Derived operations
val blit (#a:Type0) (#rrel1 #rrel2 #rel1 #rel2:srel a)
(src:mbuffer a rrel1 rel1)
(idx_src:U32.t)
(dst:mbuffer a rrel2 rel2)
(idx_dst:U32.t)
(len:U32.t)
:HST.Stack unit (requires (fun h -> live h src /\ live h dst /\
U32.v idx_src + U32.v len <= length src /\
U32.v idx_dst + U32.v len <= length dst /\
(* TODO: remove the rhs part of this disjunction once patterns on loc_buffer_from_to are introduced *)
(loc_disjoint (loc_buffer_from_to src idx_src (idx_src `U32.add` len)) (loc_buffer_from_to dst idx_dst (idx_dst `U32.add` len)) \/ disjoint src dst) /\
rel2 (as_seq h dst)
(Seq.replace_subseq (as_seq h dst) (U32.v idx_dst) (U32.v idx_dst + U32.v len)
(Seq.slice (as_seq h src) (U32.v idx_src) (U32.v idx_src + U32.v len)))))
(ensures (fun h _ h' -> modifies (loc_buffer dst) h h' /\
live h' dst /\
Seq.slice (as_seq h' dst) (U32.v idx_dst) (U32.v idx_dst + U32.v len) ==
Seq.slice (as_seq h src) (U32.v idx_src) (U32.v idx_src + U32.v len) /\
Seq.slice (as_seq h' dst) 0 (U32.v idx_dst) ==
Seq.slice (as_seq h dst) 0 (U32.v idx_dst) /\
Seq.slice (as_seq h' dst) (U32.v idx_dst + U32.v len) (length dst) ==
Seq.slice (as_seq h dst) (U32.v idx_dst + U32.v len) (length dst)))
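(* Illustration only, not part of this interface: a hypothetical copy of the
   first l cells of src into dst, for disjoint buffers whose destination
   preorder accepts the update.

let copy_prefix (#a:Type0) (#rrel1 #rrel2 #rel1 #rel2:srel a)
  (src:mbuffer a rrel1 rel1) (dst:mbuffer a rrel2 rel2) (l:U32.t)
  : HST.Stack unit
    (requires (fun h -> live h src /\ live h dst /\ disjoint src dst /\
                     U32.v l <= length src /\ U32.v l <= length dst /\
                     rel2 (as_seq h dst)
                          (Seq.replace_subseq (as_seq h dst) 0 (U32.v l)
                            (Seq.slice (as_seq h src) 0 (U32.v l)))))
    (ensures  (fun h _ h' -> modifies (loc_buffer dst) h h' /\ live h' dst))
  = blit src 0ul dst 0ul l
*)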
val fill (#t:Type) (#rrel #rel: srel t)
(b: mbuffer t rrel rel)
(z:t)
(len:U32.t)
: HST.Stack unit
(requires (fun h ->
live h b /\
U32.v len <= length b /\
rel (as_seq h b) (Seq.replace_subseq (as_seq h b) 0 (U32.v len) (Seq.create (U32.v len) z))
))
(ensures (fun h0 _ h1 ->
modifies (loc_buffer b) h0 h1 /\
live h1 b /\
Seq.slice (as_seq h1 b) 0 (U32.v len) == Seq.create (U32.v len) z /\
Seq.slice (as_seq h1 b) (U32.v len) (length b) == Seq.slice (as_seq h0 b) (U32.v len) (length b)
))
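(* Illustration only, not part of this interface: a hypothetical helper that
   zeroes out the first l cells of a buffer whose preorder accepts the update.

let clear_prefix (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (z:a) (l:U32.t)
  : HST.Stack unit
    (requires (fun h -> live h b /\ U32.v l <= length b /\
                     rel (as_seq h b)
                         (Seq.replace_subseq (as_seq h b) 0 (U32.v l)
                           (Seq.create (U32.v l) z))))
    (ensures  (fun h0 _ h1 -> modifies (loc_buffer b) h0 h1 /\
                           Seq.slice (as_seq h1 b) 0 (U32.v l) == Seq.create (U32.v l) z))
  = fill b z l
*)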
/// Type class instantiation for compositionality with other kinds of memory locations than regions, references or buffers (just in case).
/// No usage pattern has been found yet.
module MG = FStar.ModifiesGen
val abuffer' (region: HS.rid) (addr: nat) : Tot Type0 | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | region: FStar.Monotonic.HyperHeap.rid -> addr: Prims.nat -> Type0 | Prims.Tot | [
"total"
] | [] | [
"FStar.Monotonic.HyperHeap.rid",
"Prims.nat",
"FStar.Ghost.erased",
"LowStar.Monotonic.Buffer.abuffer'"
] | [] | false | false | false | true | true | let abuffer (region: HS.rid) (addr: nat) : Tot Type0 =
| G.erased (abuffer' region addr) | false |
LowStar.Monotonic.Buffer.fsti | LowStar.Monotonic.Buffer.modifies_remove_fresh_frame | val modifies_remove_fresh_frame (h1 h2 h3: HS.mem) (l: loc)
: Lemma
(requires
(HS.fresh_frame h1 h2 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h2)) l) h2 h3))
(ensures (modifies l h1 h3))
[SMTPat (modifies l h1 h3); SMTPat (HS.fresh_frame h1 h2)] | val modifies_remove_fresh_frame (h1 h2 h3: HS.mem) (l: loc)
: Lemma
(requires
(HS.fresh_frame h1 h2 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h2)) l) h2 h3))
(ensures (modifies l h1 h3))
[SMTPat (modifies l h1 h3); SMTPat (HS.fresh_frame h1 h2)] | let modifies_remove_fresh_frame (h1 h2 h3:HS.mem) (l:loc)
: Lemma (requires (HS.fresh_frame h1 h2 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h2)) l) h2 h3))
(ensures (modifies l h1 h3))
[SMTPat (modifies l h1 h3); SMTPat (HS.fresh_frame h1 h2)]
= loc_regions_unused_in h1 (HS.mod_set (Set.singleton (HS.get_tip h2)));
modifies_only_not_unused_in l h1 h3 | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 39,
"end_line": 1720,
"start_col": 0,
"start_line": 1714
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
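(* Illustrative sketch: the simplest instantiation uses the preorder relating
   any two sequences, mirroring what LowStar.Buffer does:

     let trivial_preorder (a:Type0) : srel a = fun _ _ -> True
     let buffer (a:Type0) = mbuffer a (trivial_preorder a) (trivial_preorder a)

   Such buffers may evolve without any constraint on their contents. *)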
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
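(* Illustrative sketch (hypothetical signature): since len and length are
   ghost, run-time code threads lengths explicitly and ties them to the ghost
   length only in the type:

     val sum (b:LowStar.Buffer.buffer UInt32.t) (l:U32.t{U32.v l == length b})
       : HST.Stack UInt32.t (requires (fun h -> live h b))
                            (ensures (fun _ _ _ -> True))

   The machine integer l is what an implementation actually iterates over. *)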
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining the sub-buffer-related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` return the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
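(* Illustrative sketch: carving a ghost sub-buffer of length 4 at offset 2,
   keeping the parent's preorder (assuming 2 + 4 <= length b):

     let mid = mgsub rel b 2ul 4ul

   With the trivial preorder (fun _ _ -> True) on both sides,
   compatible_sub b 2ul 4ul rel holds vacuously; for non-trivial preorders the
   compatibility condition has to be established explicitly before the lemmas
   below can be used. *)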
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers with the same region and address have
/// elements of the same type and the same initial preorder.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
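(* Illustrative sketch of the monoid laws in action: for any locations l1, l2,

     loc_union (loc_union l1 loc_none) (loc_union l2 l1) == loc_union l1 l2

   by loc_union_loc_none_r, loc_union_comm and loc_union_idem_1; the SMT
   patterns above let Z3 perform such normalizations automatically. *)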
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then so are their
/// corresponding sets of memory locations.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then so are their corresponding sets of memory
/// locations.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then so are their corresponding sets of memory locations
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
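(* Illustrative sketch: splitting a buffer into two non-overlapping halves
   yields disjoint footprints (assuming length b == 2 * U32.v n):

     let lo = mgsub rel b 0ul n
     let hi = mgsub rel b n n

   loc_disjoint (loc_buffer lo) (loc_buffer hi) then follows from
   loc_disjoint_gsub_buffer, since the two index ranges do not overlap. *)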
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
 * Same as all_disjoint, retained for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
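(* Illustrative sketch: these big-operator utilities are meant for
   specifications over several buffers at once, e.g. a precondition such as

     all_live h [buf b1; buf b2; buf b3] /\
     all_disjoint [loc_buffer b1; loc_buffer b2; loc_buffer b3]

   reduces at typechecking time to the expected conjunction of live facts and
   pairwise loc_disjoint facts. *)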
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
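(* Illustrative sketch (hypothetical client signature): a typical Low*
   specification combines live, loc_buffer and modifies:

     val incr_first (b:LowStar.Buffer.buffer UInt32.t{length b > 0})
       : HST.Stack unit
         (requires (fun h -> live h b))
         (ensures (fun h0 _ h1 -> modifies (loc_buffer b) h0 h1 /\ live h1 b))

   Any object whose location is disjoint from loc_buffer b keeps its liveness
   and contents across such a call, by the elimination lemmas below. *)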
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
/// If the memory state does not change, then any set of memory locations
/// is trivially modified (including, in particular, the empty set, ``loc_none``).
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
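(* Illustrative sketch: weakening a modifies clause. If a callee establishes
   modifies (loc_buffer b1) h0 h1, a caller whose declared footprint is
   loc_union (loc_buffer b1) (loc_buffer b2) obtains
   modifies (loc_union (loc_buffer b1) (loc_buffer b2)) h0 h1
   from modifies_loc_includes together with loc_includes_union_l. *)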
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x
/// Modifies clauses are transitive. This lemma is the most general
/// one.
val modifies_trans
(s12: loc)
(h1 h2: HS.mem)
(s23: loc)
(h3: HS.mem)
: Lemma
(requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
(ensures (modifies (loc_union s12 s23) h1 h3))
let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
= modifies_trans l h1 h2 l_goal h3
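(* Illustrative sketch: chaining two stateful steps with the same footprint l,
   i.e. modifies l h1 h2 and modifies l h2 h3, yields modifies l h1 h3 via
   modifies_trans_linear l l h1 h2 h3 (or automatically through its SMT
   pattern), without growing the footprint to loc_union l l. *)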
/// Regions that are not live can be removed from sets of memory
/// locations that are modified.
val modifies_only_live_regions
(rs: Set.set HS.rid)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_regions false rs) l) h h' /\
(forall r . Set.mem r rs ==> (~ (HS.live_region h r)))
))
(ensures (modifies l h h'))
/// As a consequence, fresh regions can be removed from modifies
/// clauses.
val no_upd_fresh_region: r:HS.rid -> l:loc -> h0:HS.mem -> h1:HS.mem -> Lemma
(requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1))
(ensures (modifies l h0 h1))
[SMTPat (HS.fresh_region r h0 h1); SMTPat (modifies l h0 h1)]
val new_region_modifies (m0: HS.mem) (r0: HS.rid) (col: option int) : Lemma
(requires (HST.is_eternal_region r0 /\ HS.live_region m0 r0 /\ (None? col \/ HS.is_heap_color (Some?.v col))))
(ensures (
let (_, m1) = HS.new_eternal_region m0 r0 col in
modifies loc_none m0 m1
))
[SMTPat (HS.new_eternal_region m0 r0 col)]
/// Stack discipline: any stack frame (and all its transitively
/// extending regions) that is pushed, modified and popped can be
/// removed from a modifies clause.
/// AR: 01/29/2019: Removing the smt pattern from this lemma.
/// Clients are no longer expected to call it explicitly;
/// if you find that you have to, please raise an issue.
val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
))
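(* Illustrative sketch (hypothetical client code, assuming LowStar.Buffer's
   alloca and upd): the usual stack discipline.

     let f () : HST.Stack unit
       (requires (fun _ -> True))
       (ensures (fun h0 _ h1 -> modifies loc_none h0 h1))
       = HST.push_frame ();
         let tmp = LowStar.Buffer.alloca 0ul 8ul in
         LowStar.Buffer.upd tmp 0ul 1ul;
         HST.pop_frame ()

   The temporary tmp lives only in the freshly pushed frame, so writes to it
   are dropped from the final modifies clause by the lemma above. *)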
/// Compatibility lemmas to rescue modifies clauses specified in the
/// standard F* HyperStack library.
val modifies_loc_regions_intro
(rs: Set.set HS.rid)
(h1 h2: HS.mem)
: Lemma
(requires (HS.modifies rs h1 h2))
(ensures (modifies (loc_regions true rs) h1 h2))
val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2))
/// Modifies clauses for allocating a reference: nothing is
/// modified. (In particular, a modifies clause does not track
/// memory locations that are created.)
val modifies_ralloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(i: HS.rid)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel)
(h' : HS.mem)
: Lemma
(requires (HST.ralloc_post i init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.ralloc_post i init h x h')]
val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')]
/// Modifies clause for freeing a reference: the address is modified.
val modifies_free
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel { HS.is_mm r } )
(m: HS.mem { m `HS.contains` r } )
: Lemma
(modifies (loc_freed_mreference r) m (HS.free r m))
[SMTPat (HS.free r m)]
/// Another compatibility lemma
val modifies_none_modifies
(h1 h2: HS.mem)
: Lemma
(requires (HST.modifies_none h1 h2))
(ensures (modifies loc_none h1 h2))
[SMTPat (HST.modifies_none h1 h2)]
/// Compatibility with HS.upd
val modifies_upd
(#t: Type) (#pre: Preorder.preorder t)
(r: HS.mreference t pre)
(v: t)
(h: HS.mem)
: Lemma
(requires (HS.contains h r))
(ensures (modifies (loc_mreference r) h (HS.upd h r v)))
[SMTPat (HS.upd h r v)]
/// Introduction lemma for modifying loc_buffer_from_to
val modifies_loc_buffer_from_to_intro
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
/// A memory ``h`` does not contain address ``a`` in region ``r``, denoted
/// ``does_not_contain_addr h (r, a)``, only if either region ``r`` is
/// not live, or address ``a`` is unused in region ``r``.
(* BEGIN TODO: move to FStar.Monotonic.HyperStack *)
val does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: GTot Type0
val not_live_region_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (~ (HS.live_region h (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val unused_in_does_not_contain_addr
(h: HS.mem)
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r)))
val addr_unused_in_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val free_does_not_contain_addr
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
HS.is_mm r /\
m `HS.contains` r /\
fst x == HS.frameOf r /\
snd x == HS.as_addr r
))
(ensures (
HS.free r m `does_not_contain_addr` x
))
[SMTPat (HS.free r m `does_not_contain_addr` x)]
val does_not_contain_addr_elim
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
m `does_not_contain_addr` x /\
HS.frameOf r == fst x /\
HS.as_addr r == snd x
))
(ensures (~ (m `HS.contains` r)))
(** END TODO *)
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_live_addresses
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_addresses false r a) l) h h' /\
(forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x))
))
(ensures (modifies l h h'))
(* Generic way to ensure that a buffer just allocated is disjoint from
   any other object, no matter how the latter's liveness is defined. *)
val loc_not_unused_in (h: HS.mem) : GTot loc
val loc_unused_in (h: HS.mem) : GTot loc
(* Shortcut notations with more handy names *)
let loc_in (l: loc) (h: HS.mem) =
loc_not_unused_in h `loc_includes` l
let loc_not_in (l: loc) (h: HS.mem) =
loc_unused_in h `loc_includes` l
val loc_regions_unused_in (h: HS.mem) (rs: Set.set HS.rid) : Lemma
(requires (forall r . Set.mem r rs ==> (~ (HS.live_region h r))))
(ensures (loc_unused_in h `loc_includes` loc_regions false rs))
val loc_unused_in_not_unused_in_disjoint (h: HS.mem) : Lemma
(loc_disjoint (loc_unused_in h) (loc_not_unused_in h))
val not_live_region_loc_not_unused_in_disjoint
(h0: HS.mem)
(r: HS.rid)
: Lemma
(requires (~ (HS.live_region h0 r)))
(ensures (loc_disjoint (loc_region_only false r) (loc_not_unused_in h0)))
let fresh_frame_loc_not_unused_in_disjoint
(h0 h1: HS.mem)
: Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)]
= not_live_region_loc_not_unused_in_disjoint h0 (HS.get_tip h1)
val live_loc_not_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b))
(ensures (loc_not_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (live h b)]
val unused_in_loc_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (unused_in b h))
(ensures (loc_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (unused_in b h)]
val modifies_address_liveness_insensitive_unused_in
(h h' : HS.mem)
: Lemma
(requires (modifies (address_liveness_insensitive_locs) h h'))
(ensures (loc_not_unused_in h' `loc_includes` loc_not_unused_in h /\ loc_unused_in h `loc_includes` loc_unused_in h'))
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_not_unused_in
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (modifies (loc_union (loc_unused_in h) l) h h'))
(ensures (modifies l h h'))
val mreference_live_loc_not_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (h `HS.contains` r))
(ensures (loc_not_unused_in h `loc_includes` loc_freed_mreference r /\ loc_not_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.contains h r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_freed_mreference r)];
]]
val mreference_unused_in_loc_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (loc_unused_in h `loc_includes` loc_freed_mreference r /\ loc_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.unused_in r h)];
[SMTPat (loc_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_unused_in h `loc_includes` loc_freed_mreference r)];
]]
let unused_in_not_unused_in_disjoint_2
(l1 l2 l1' l2': loc)
(h: HS.mem)
: Lemma
(requires (loc_unused_in h `loc_includes` l1 /\ loc_not_unused_in h `loc_includes` l2 /\ l1 `loc_includes` l1' /\ l2 `loc_includes` l2' ))
(ensures (loc_disjoint l1' l2' ))
[SMTPat (loc_disjoint l1' l2'); SMTPat (loc_unused_in h `loc_includes` l1); SMTPat (loc_not_unused_in h `loc_includes` l2)]
= loc_includes_trans (loc_unused_in h) l1 l1' ;
loc_includes_trans (loc_not_unused_in h) l2 l2' ;
loc_unused_in_not_unused_in_disjoint h ;
loc_disjoint_includes (loc_unused_in h) (loc_not_unused_in h) l1' l2'
val modifies_loc_unused_in
(l: loc)
(h1 h2: HS.mem)
(l' : loc)
: Lemma
(requires (
modifies l h1 h2 /\
address_liveness_insensitive_locs `loc_includes` l /\
loc_unused_in h2 `loc_includes` l'
))
(ensures (loc_unused_in h1 `loc_includes` l'))
[SMTPatOr [
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h2 `loc_includes` l')];
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h1 `loc_includes` l')];
]]
/// Shorthand: freshness
let fresh_loc (l: loc) (h h' : HS.mem) : GTot Type0 =
loc_unused_in h `loc_includes` l /\
loc_not_unused_in h' `loc_includes` l
let ralloc_post_fresh_loc (#a:Type) (#rel:Preorder.preorder a) (i: HS.rid) (init:a) (m0: HS.mem)
(x: HST.mreference a rel{HST.is_eternal_region (HS.frameOf x)}) (m1: HS.mem) : Lemma
(requires (HST.ralloc_post i init m0 x m1))
(ensures (fresh_loc (loc_freed_mreference x) m0 m1))
[SMTPat (HST.ralloc_post i init m0 x m1)]
= ()
//AR: this is needed for liveness across fresh_frame
val fresh_frame_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (modifies loc_none h0 h1))
[SMTPat (HS.fresh_frame h0 h1)]
val popped_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.popped h0 h1))
(ensures (modifies (loc_region_only false (HS.get_tip h0)) h0 h1))
[SMTPat (HS.popped h0 h1)]
val modifies_remove_new_locs (l_fresh l_aux l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (fresh_loc l_fresh h1 h2 /\
modifies l_aux h1 h2 /\
l_goal `loc_includes` l_aux /\
modifies (loc_union l_fresh l_goal) h2 h3))
(ensures (modifies l_goal h1 h3))
[SMTPat (fresh_loc l_fresh h1 h2);
SMTPat (modifies l_aux h1 h2);
SMTPat (modifies l_goal h1 h3)]
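(* Illustrative sketch (hypothetical lemma, not part of this interface):
   the typical use of modifies_remove_new_locs is to frame away a buffer
   allocated between h1 and h2, whose footprint is therefore fresh.

   let frame_across_alloc (#a:Type0) (#rrel #rel:srel a)
     (b:mbuffer a rrel rel) (l:loc) (h1 h2 h3:HS.mem)
     : Lemma (requires (fresh_loc (loc_buffer b) h1 h2 /\
                        modifies loc_none h1 h2 /\
                        modifies (loc_union (loc_buffer b) l) h2 h3))
             (ensures  (modifies l h1 h3))
     = ()  // expected to follow from modifies_remove_new_locs via its SMT patterns
*)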
(*
 * AR: this lemma frames the modifies clause across a fresh frame.
 * One way to do this would have been to reuse the lemma modifies_remove_new_locs,
 * treating the fresh frame as just another new location;
 * however, the way the library is set up, loc_region in any form cannot be considered
 * a fresh loc,
* so, we have a special lemma for fresh_frame | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
h1: FStar.Monotonic.HyperStack.mem ->
h2: FStar.Monotonic.HyperStack.mem ->
h3: FStar.Monotonic.HyperStack.mem ->
l: LowStar.Monotonic.Buffer.loc
-> FStar.Pervasives.Lemma
(requires
FStar.Monotonic.HyperStack.fresh_frame h1 h2 /\
LowStar.Monotonic.Buffer.modifies (LowStar.Monotonic.Buffer.loc_union (LowStar.Monotonic.Buffer.loc_all_regions_from
false
(FStar.Monotonic.HyperStack.get_tip h2))
l)
h2
h3)
(ensures LowStar.Monotonic.Buffer.modifies l h1 h3)
[
SMTPat (LowStar.Monotonic.Buffer.modifies l h1 h3);
SMTPat (FStar.Monotonic.HyperStack.fresh_frame h1 h2)
] | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"FStar.Monotonic.HyperStack.mem",
"LowStar.Monotonic.Buffer.loc",
"LowStar.Monotonic.Buffer.modifies_only_not_unused_in",
"Prims.unit",
"LowStar.Monotonic.Buffer.loc_regions_unused_in",
"FStar.Monotonic.HyperHeap.mod_set",
"FStar.Set.singleton",
"FStar.Monotonic.HyperHeap.rid",
"FStar.Monotonic.HyperStack.get_tip",
"Prims.l_and",
"FStar.Monotonic.HyperStack.fresh_frame",
"LowStar.Monotonic.Buffer.modifies",
"LowStar.Monotonic.Buffer.loc_union",
"LowStar.Monotonic.Buffer.loc_all_regions_from",
"Prims.squash",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat",
"Prims.logical",
"Prims.Nil"
] | [] | true | false | true | false | false | let modifies_remove_fresh_frame (h1 h2 h3: HS.mem) (l: loc)
: Lemma
(requires
(HS.fresh_frame h1 h2 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h2)) l) h2 h3))
(ensures (modifies l h1 h3))
[SMTPat (modifies l h1 h3); SMTPat (HS.fresh_frame h1 h2)] =
| loc_regions_unused_in h1 (HS.mod_set (Set.singleton (HS.get_tip h2)));
modifies_only_not_unused_in l h1 h3 | false |
LowStar.Monotonic.Buffer.fsti | LowStar.Monotonic.Buffer.unused_in_not_unused_in_disjoint_2 | val unused_in_not_unused_in_disjoint_2 (l1 l2 l1' l2': loc) (h: HS.mem)
: Lemma
(requires
((loc_unused_in h) `loc_includes` l1 /\ (loc_not_unused_in h) `loc_includes` l2 /\
l1 `loc_includes` l1' /\ l2 `loc_includes` l2'))
(ensures (loc_disjoint l1' l2'))
[
SMTPat (loc_disjoint l1' l2');
SMTPat ((loc_unused_in h) `loc_includes` l1);
SMTPat ((loc_not_unused_in h) `loc_includes` l2)
] | val unused_in_not_unused_in_disjoint_2 (l1 l2 l1' l2': loc) (h: HS.mem)
: Lemma
(requires
((loc_unused_in h) `loc_includes` l1 /\ (loc_not_unused_in h) `loc_includes` l2 /\
l1 `loc_includes` l1' /\ l2 `loc_includes` l2'))
(ensures (loc_disjoint l1' l2'))
[
SMTPat (loc_disjoint l1' l2');
SMTPat ((loc_unused_in h) `loc_includes` l1);
SMTPat ((loc_not_unused_in h) `loc_includes` l2)
] | let unused_in_not_unused_in_disjoint_2
(l1 l2 l1' l2': loc)
(h: HS.mem)
: Lemma
(requires (loc_unused_in h `loc_includes` l1 /\ loc_not_unused_in h `loc_includes` l2 /\ l1 `loc_includes` l1' /\ l2 `loc_includes` l2' ))
(ensures (loc_disjoint l1' l2' ))
[SMTPat (loc_disjoint l1' l2'); SMTPat (loc_unused_in h `loc_includes` l1); SMTPat (loc_not_unused_in h `loc_includes` l2)]
= loc_includes_trans (loc_unused_in h) l1 l1' ;
loc_includes_trans (loc_not_unused_in h) l2 l2' ;
loc_unused_in_not_unused_in_disjoint h ;
loc_disjoint_includes (loc_unused_in h) (loc_not_unused_in h) l1' l2' | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 71,
"end_line": 1654,
"start_col": 0,
"start_line": 1644
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` have the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder (sub_rel) to the sub-buffer,
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
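(* Illustrative sketch (hypothetical lemma, not part of this interface):
   carving a sub-buffer out of a sub-buffer is the same as carving it
   directly out of the original buffer at the summed offset.

   let gsub_of_gsub_example (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
     : Lemma (requires (length b >= 8))
             (ensures  (mgsub rel (mgsub rel b 2ul 6ul) 1ul 3ul == mgsub rel b 3ul 3ul))
     = ()  // expected to follow from gsub_gsub via its SMT pattern
*)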
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers having the same region and address have
/// their elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
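(* Illustrative sketch (hypothetical lemma, not part of this interface):
   a union always includes each of its components, by reflexivity and
   loc_includes_union_l.

   let union_includes_left (l1 l2:loc)
     : Lemma (loc_includes (loc_union l1 l2) l1)
     = loc_includes_refl l1;
       loc_includes_union_l l1 l2 l1
*)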
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then the same inclusion holds
/// between their corresponding sets of memory locations.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then the same inclusion holds between their
/// corresponding sets of memory locations.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then the same inclusion holds for their corresponding memory locations
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
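(* Illustrative sketch (hypothetical lemma, not part of this interface):
   splitting a buffer at index i yields two sub-buffers with disjoint
   footprints, as captured by loc_disjoint_gsub_buffer.

   let split_disjoint (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (i:U32.t)
     : Lemma (requires (U32.v i <= length b))
             (ensures  (loc_disjoint (loc_buffer (mgsub rel b 0ul i))
                                     (loc_buffer (mgsub rel b i (len b `U32.sub` i)))))
     = ()  // expected to follow from loc_disjoint_gsub_buffer via its SMT patterns
*)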
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
 * Same as all_disjoint, retained for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
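(* Illustrative sketch (hypothetical lemma, not part of this interface):
   on a concrete list, loc_pairwise_disjoint normalizes to the conjunction
   of the pairwise disjointness facts, e.g. for three locations:

   let pairwise_three (l1 l2 l3:loc)
     : Lemma (requires (loc_pairwise_disjoint [l1; l2; l3]))
             (ensures  (loc_disjoint l1 l2 /\ loc_disjoint l1 l3 /\ loc_disjoint l2 l3))
     = ()
*)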
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if,
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
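(* Illustrative sketch (hypothetical lemma, not part of this interface):
   the standard framing argument: a live buffer disjoint from the modified
   footprint keeps both its liveness and its contents.

   let frame_disjoint_buffer (#a:Type0) (#rrel #rel:srel a)
     (b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
     : Lemma (requires (live h b /\ loc_disjoint (loc_buffer b) p /\ modifies p h h'))
             (ensures  (live h' b /\ as_seq h' b == as_seq h b))
     = modifies_buffer_elim b p h h'
*)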
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
/// If the memory state does not change, then any set of memory locations
/// is trivially modified (in particular, the empty set ``loc_none``).
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x
/// Modifies clauses are transitive. This lemma is the most general
/// one.
val modifies_trans
(s12: loc)
(h1 h2: HS.mem)
(s23: loc)
(h3: HS.mem)
: Lemma
(requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
(ensures (modifies (loc_union s12 s23) h1 h3))
let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
= modifies_trans l h1 h2 l_goal h3
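(* Illustrative sketch (hypothetical lemma, not part of this interface):
   two successive state transitions compose into a single modifies clause
   over the union of their footprints.

   let compose_modifies (l1 l2:loc) (h1 h2 h3:HS.mem)
     : Lemma (requires (modifies l1 h1 h2 /\ modifies l2 h2 h3))
             (ensures  (modifies (loc_union l1 l2) h1 h3))
     = modifies_trans l1 h1 h2 l2 h3
*)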
/// Regions that are not live can be removed from sets of memory
/// locations that are modified.
val modifies_only_live_regions
(rs: Set.set HS.rid)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_regions false rs) l) h h' /\
(forall r . Set.mem r rs ==> (~ (HS.live_region h r)))
))
(ensures (modifies l h h'))
/// As a consequence, fresh regions can be removed from modifies
/// clauses.
val no_upd_fresh_region: r:HS.rid -> l:loc -> h0:HS.mem -> h1:HS.mem -> Lemma
(requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1))
(ensures (modifies l h0 h1))
[SMTPat (HS.fresh_region r h0 h1); SMTPat (modifies l h0 h1)]
val new_region_modifies (m0: HS.mem) (r0: HS.rid) (col: option int) : Lemma
(requires (HST.is_eternal_region r0 /\ HS.live_region m0 r0 /\ (None? col \/ HS.is_heap_color (Some?.v col))))
(ensures (
let (_, m1) = HS.new_eternal_region m0 r0 col in
modifies loc_none m0 m1
))
[SMTPat (HS.new_eternal_region m0 r0 col)]
/// Stack discipline: any stack frame (and all its transitively
/// extending regions) that is pushed, modified and popped can be
/// removed from a modifies clause.
/// AR: 01/29/2019: Removing the smt pattern from this lemma.
/// Clients are no longer expected to call it explicitly;
/// if you find yourself having to, please raise an issue.
val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
))
/// Compatibility lemmas to rescue modifies clauses specified in the
/// standard F* HyperStack library.
val modifies_loc_regions_intro
(rs: Set.set HS.rid)
(h1 h2: HS.mem)
: Lemma
(requires (HS.modifies rs h1 h2))
(ensures (modifies (loc_regions true rs) h1 h2))
val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2))
/// Modifies clauses for allocating a reference: nothing is
/// modified. (In particular, a modifies clause does not track
/// memory locations that are created.)
val modifies_ralloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(i: HS.rid)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel)
(h' : HS.mem)
: Lemma
(requires (HST.ralloc_post i init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.ralloc_post i init h x h')]
val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')]
/// Modifies clause for freeing a reference: the address is modified.
val modifies_free
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel { HS.is_mm r } )
(m: HS.mem { m `HS.contains` r } )
: Lemma
(modifies (loc_freed_mreference r) m (HS.free r m))
[SMTPat (HS.free r m)]
/// Another compatibility lemma
val modifies_none_modifies
(h1 h2: HS.mem)
: Lemma
(requires (HST.modifies_none h1 h2))
(ensures (modifies loc_none h1 h2))
[SMTPat (HST.modifies_none h1 h2)]
/// Compatibility with HS.upd
val modifies_upd
(#t: Type) (#pre: Preorder.preorder t)
(r: HS.mreference t pre)
(v: t)
(h: HS.mem)
: Lemma
(requires (HS.contains h r))
(ensures (modifies (loc_mreference r) h (HS.upd h r v)))
[SMTPat (HS.upd h r v)]
/// Introduction lemma for modifying loc_buffer_from_to
val modifies_loc_buffer_from_to_intro
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
/// A memory ``h`` does not contain address ``a`` in region ``r``, denoted
/// ``does_not_contain_addr h (r, a)``, only if either region ``r`` is
/// not live, or address ``a`` is unused in region ``r``.
(* BEGIN TODO: move to FStar.Monotonic.HyperStack *)
val does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: GTot Type0
val not_live_region_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (~ (HS.live_region h (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val unused_in_does_not_contain_addr
(h: HS.mem)
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r)))
val addr_unused_in_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val free_does_not_contain_addr
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
HS.is_mm r /\
m `HS.contains` r /\
fst x == HS.frameOf r /\
snd x == HS.as_addr r
))
(ensures (
HS.free r m `does_not_contain_addr` x
))
[SMTPat (HS.free r m `does_not_contain_addr` x)]
val does_not_contain_addr_elim
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
m `does_not_contain_addr` x /\
HS.frameOf r == fst x /\
HS.as_addr r == snd x
))
(ensures (~ (m `HS.contains` r)))
(** END TODO *)
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_live_addresses
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_addresses false r a) l) h h' /\
(forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x))
))
(ensures (modifies l h h'))
(* Generic way to ensure that a buffer just allocated is disjoint from
any other object, however the latter's liveness is defined. *)
val loc_not_unused_in (h: HS.mem) : GTot loc
val loc_unused_in (h: HS.mem) : GTot loc
(* Shortcut notations with more handy names *)
let loc_in (l: loc) (h: HS.mem) =
loc_not_unused_in h `loc_includes` l
let loc_not_in (l: loc) (h: HS.mem) =
loc_unused_in h `loc_includes` l
val loc_regions_unused_in (h: HS.mem) (rs: Set.set HS.rid) : Lemma
(requires (forall r . Set.mem r rs ==> (~ (HS.live_region h r))))
(ensures (loc_unused_in h `loc_includes` loc_regions false rs))
val loc_unused_in_not_unused_in_disjoint (h: HS.mem) : Lemma
(loc_disjoint (loc_unused_in h) (loc_not_unused_in h))
val not_live_region_loc_not_unused_in_disjoint
(h0: HS.mem)
(r: HS.rid)
: Lemma
(requires (~ (HS.live_region h0 r)))
(ensures (loc_disjoint (loc_region_only false r) (loc_not_unused_in h0)))
let fresh_frame_loc_not_unused_in_disjoint
(h0 h1: HS.mem)
: Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)]
= not_live_region_loc_not_unused_in_disjoint h0 (HS.get_tip h1)
val live_loc_not_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b))
(ensures (loc_not_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (live h b)]
val unused_in_loc_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (unused_in b h))
(ensures (loc_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (unused_in b h)]
val modifies_address_liveness_insensitive_unused_in
(h h' : HS.mem)
: Lemma
(requires (modifies (address_liveness_insensitive_locs) h h'))
(ensures (loc_not_unused_in h' `loc_includes` loc_not_unused_in h /\ loc_unused_in h `loc_includes` loc_unused_in h'))
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_not_unused_in
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (modifies (loc_union (loc_unused_in h) l) h h'))
(ensures (modifies l h h'))
val mreference_live_loc_not_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (h `HS.contains` r))
(ensures (loc_not_unused_in h `loc_includes` loc_freed_mreference r /\ loc_not_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.contains h r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_freed_mreference r)];
]]
val mreference_unused_in_loc_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (loc_unused_in h `loc_includes` loc_freed_mreference r /\ loc_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.unused_in r h)];
[SMTPat (loc_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_unused_in h `loc_includes` loc_freed_mreference r)];
]] | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
l1: LowStar.Monotonic.Buffer.loc ->
l2: LowStar.Monotonic.Buffer.loc ->
l1': LowStar.Monotonic.Buffer.loc ->
l2': LowStar.Monotonic.Buffer.loc ->
h: FStar.Monotonic.HyperStack.mem
-> FStar.Pervasives.Lemma
(requires
LowStar.Monotonic.Buffer.loc_includes (LowStar.Monotonic.Buffer.loc_unused_in h) l1 /\
LowStar.Monotonic.Buffer.loc_includes (LowStar.Monotonic.Buffer.loc_not_unused_in h) l2 /\
LowStar.Monotonic.Buffer.loc_includes l1 l1' /\ LowStar.Monotonic.Buffer.loc_includes l2 l2'
)
(ensures LowStar.Monotonic.Buffer.loc_disjoint l1' l2')
[
SMTPat (LowStar.Monotonic.Buffer.loc_disjoint l1' l2');
SMTPat (LowStar.Monotonic.Buffer.loc_includes (LowStar.Monotonic.Buffer.loc_unused_in h) l1);
SMTPat (LowStar.Monotonic.Buffer.loc_includes (LowStar.Monotonic.Buffer.loc_not_unused_in h)
l2)
] | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowStar.Monotonic.Buffer.loc",
"FStar.Monotonic.HyperStack.mem",
"LowStar.Monotonic.Buffer.loc_disjoint_includes",
"LowStar.Monotonic.Buffer.loc_unused_in",
"LowStar.Monotonic.Buffer.loc_not_unused_in",
"Prims.unit",
"LowStar.Monotonic.Buffer.loc_unused_in_not_unused_in_disjoint",
"LowStar.Monotonic.Buffer.loc_includes_trans",
"Prims.l_and",
"LowStar.Monotonic.Buffer.loc_includes",
"Prims.squash",
"LowStar.Monotonic.Buffer.loc_disjoint",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat",
"Prims.Nil"
] | [] | true | false | true | false | false | let unused_in_not_unused_in_disjoint_2 (l1 l2 l1' l2': loc) (h: HS.mem)
: Lemma
(requires
((loc_unused_in h) `loc_includes` l1 /\ (loc_not_unused_in h) `loc_includes` l2 /\
l1 `loc_includes` l1' /\ l2 `loc_includes` l2'))
(ensures (loc_disjoint l1' l2'))
[
SMTPat (loc_disjoint l1' l2');
SMTPat ((loc_unused_in h) `loc_includes` l1);
SMTPat ((loc_not_unused_in h) `loc_includes` l2)
] =
| loc_includes_trans (loc_unused_in h) l1 l1';
loc_includes_trans (loc_not_unused_in h) l2 l2';
loc_unused_in_not_unused_in_disjoint h;
loc_disjoint_includes (loc_unused_in h) (loc_not_unused_in h) l1' l2' | false |
LowStar.Monotonic.Buffer.fsti | LowStar.Monotonic.Buffer.g_upd | val g_upd
(#a: Type0)
(#rrel #rel: srel a)
(b: mbuffer a rrel rel)
(i: nat{i < length b})
(v: a)
(h: HS.mem{live h b})
: GTot HS.mem | val g_upd
(#a: Type0)
(#rrel #rel: srel a)
(b: mbuffer a rrel rel)
(i: nat{i < length b})
(v: a)
(h: HS.mem{live h b})
: GTot HS.mem | let g_upd (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: GTot HS.mem
= g_upd_seq b (Seq.upd (as_seq h b) i v) h | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 44,
"end_line": 1874,
"start_col": 0,
"start_line": 1868
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
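(* Illustrative sketch, not part of the original interface: [get] simply
   indexes the reflected sequence, so the two spellings below agree. The
   lemma and its names are hypothetical. *)
let get_example (#a:Type0) (#rrel #rel:srel a)
  (h:HS.mem) (b:mbuffer a rrel rel) (i:nat{i < length b})
  : Lemma (get h b i == Seq.index (as_seq h b) i)
  = ()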
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining the sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible (in the sense of ``compatible_sub`` above)
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers with the same region and address have
/// elements of the same type (and the same initial preorder).
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
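(* Illustrative sketch, not part of the original interface: the unit and
   idempotence laws above let simple union expressions collapse. The lemma
   below is a hypothetical example. *)
let loc_union_laws_example (l:loc)
  : Lemma (loc_union loc_none (loc_union l l) == l)
  = loc_union_loc_none_l (loc_union l l);
    loc_union_idem l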
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
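(* Illustrative sketch, not part of the original interface: inclusion is
   compatible with union, so a set covering both operands covers their
   union. All names are hypothetical. *)
let loc_includes_union_example (s l1 l2:loc)
  : Lemma (requires (loc_includes s l1 /\ loc_includes s l2))
          (ensures (loc_includes s (loc_union l1 l2)))
  = loc_includes_union_r s l1 l2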
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then the set of memory
/// locations corresponding to ``b1`` includes the one corresponding to ``b2``.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in the region of ``b``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then the set of memory locations corresponding to
/// ``s1`` includes the one corresponding to ``s2``.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then the set of memory locations corresponding to ``s1`` includes the
/// one corresponding to ``s2``.
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
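(* Illustrative sketch, not part of the original interface: disjointness is
   stable under shrinking either side. The lemma and its names are
   hypothetical. *)
let loc_disjoint_shrink_example (big1 big2 small1 small2:loc)
  : Lemma (requires (loc_disjoint big1 big2 /\
                     loc_includes big1 small1 /\
                     loc_includes big2 small2))
          (ensures (loc_disjoint small1 small2))
  = loc_disjoint_includes big1 big2 small1 small2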
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
* Same as all_disjoint, retained for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
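(* Illustrative sketch, not part of the original interface: the usual
   framing pattern. If only [b] is written between [h] and [h'], then a
   live buffer [b'] disjoint from it keeps its liveness and contents.
   Names are hypothetical. *)
let framing_example (#a:Type0) (#rrel #rel:srel a)
  (b b':mbuffer a rrel rel) (h h':HS.mem)
  : Lemma (requires (live h b' /\
                     loc_disjoint (loc_buffer b') (loc_buffer b) /\
                     modifies (loc_buffer b) h h'))
          (ensures (live h' b' /\ as_seq h b' == as_seq h' b'))
  = modifies_buffer_elim b' (loc_buffer b) h h'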
/// If the memory state does not change, then ``modifies s h h`` holds for
/// any set ``s`` of memory locations (in particular, for the empty set ``loc_none``).
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
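(* Illustrative sketch, not part of the original interface: writing to a
   buffer cannot deallocate a distinct live buffer, because buffer
   locations are address-liveness-insensitive. Names are hypothetical. *)
let liveness_preserved_example (#a:Type0) (#rrel #rel:srel a)
  (b b':mbuffer a rrel rel) (h h':HS.mem)
  : Lemma (requires (live h b' /\ modifies (loc_buffer b) h h'))
          (ensures (live h' b'))
  = address_liveness_insensitive_buffer b;
    modifies_liveness_insensitive_buffer_weak (loc_buffer b) h h' b'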
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x
/// Modifies clauses are transitive. This lemma is the most general
/// one.
val modifies_trans
(s12: loc)
(h1 h2: HS.mem)
(s23: loc)
(h3: HS.mem)
: Lemma
(requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
(ensures (modifies (loc_union s12 s23) h1 h3))
let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
= modifies_trans l h1 h2 l_goal h3
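/// A small, illustrative corollary (for illustration only, under a
/// hypothetical name): two successive steps that each modify the same
/// footprint ``l`` compose into a single ``modifies l`` clause, by
/// ``modifies_trans`` followed by idempotence of ``loc_union``.
let modifies_trans_same_footprint (l:loc) (h1 h2 h3:HS.mem)
  : Lemma (requires (modifies l h1 h2 /\ modifies l h2 h3))
          (ensures (modifies l h1 h3))
  = modifies_trans l h1 h2 l h3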
/// Regions that are not live can be removed from sets of memory
/// locations that are modified.
val modifies_only_live_regions
(rs: Set.set HS.rid)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_regions false rs) l) h h' /\
(forall r . Set.mem r rs ==> (~ (HS.live_region h r)))
))
(ensures (modifies l h h'))
/// As a consequence, fresh regions can be removed from modifies
/// clauses.
val no_upd_fresh_region: r:HS.rid -> l:loc -> h0:HS.mem -> h1:HS.mem -> Lemma
(requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1))
(ensures (modifies l h0 h1))
[SMTPat (HS.fresh_region r h0 h1); SMTPat (modifies l h0 h1)]
val new_region_modifies (m0: HS.mem) (r0: HS.rid) (col: option int) : Lemma
(requires (HST.is_eternal_region r0 /\ HS.live_region m0 r0 /\ (None? col \/ HS.is_heap_color (Some?.v col))))
(ensures (
let (_, m1) = HS.new_eternal_region m0 r0 col in
modifies loc_none m0 m1
))
[SMTPat (HS.new_eternal_region m0 r0 col)]
/// Stack discipline: any stack frame (and all its transitively
/// extending regions) that is pushed, modified and popped can be
/// removed from a modifies clause.
/// AR: 01/29/2019: Removing the smt pattern from this lemma.
/// Clients are no longer expected to call it explicitly;
/// if you find that you have to, please raise an issue.
val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
))
/// Compatibility lemmas to rescue modifies clauses specified in the
/// standard F* HyperStack library.
val modifies_loc_regions_intro
(rs: Set.set HS.rid)
(h1 h2: HS.mem)
: Lemma
(requires (HS.modifies rs h1 h2))
(ensures (modifies (loc_regions true rs) h1 h2))
val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2))
/// Modifies clauses for allocating a reference: nothing is
/// modified. (In particular, a modifies clause does not track
/// memory locations that are created.)
val modifies_ralloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(i: HS.rid)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel)
(h' : HS.mem)
: Lemma
(requires (HST.ralloc_post i init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.ralloc_post i init h x h')]
val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')]
/// Modifies clause for freeing a reference: the address is modified.
val modifies_free
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel { HS.is_mm r } )
(m: HS.mem { m `HS.contains` r } )
: Lemma
(modifies (loc_freed_mreference r) m (HS.free r m))
[SMTPat (HS.free r m)]
/// Another compatibility lemma
val modifies_none_modifies
(h1 h2: HS.mem)
: Lemma
(requires (HST.modifies_none h1 h2))
(ensures (modifies loc_none h1 h2))
[SMTPat (HST.modifies_none h1 h2)]
/// Compatibility with HS.upd
val modifies_upd
(#t: Type) (#pre: Preorder.preorder t)
(r: HS.mreference t pre)
(v: t)
(h: HS.mem)
: Lemma
(requires (HS.contains h r))
(ensures (modifies (loc_mreference r) h (HS.upd h r v)))
[SMTPat (HS.upd h r v)]
/// Introduction lemma for modifying loc_buffer_from_to
val modifies_loc_buffer_from_to_intro
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
/// A memory ``h`` does not contain address ``a`` in region ``r``, denoted
/// ``does_not_contain_addr h (r, a)``, only if either region ``r`` is
/// not live, or address ``a`` is unused in region ``r``.
(* BEGIN TODO: move to FStar.Monotonic.HyperStack *)
val does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: GTot Type0
val not_live_region_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (~ (HS.live_region h (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val unused_in_does_not_contain_addr
(h: HS.mem)
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r)))
val addr_unused_in_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val free_does_not_contain_addr
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
HS.is_mm r /\
m `HS.contains` r /\
fst x == HS.frameOf r /\
snd x == HS.as_addr r
))
(ensures (
HS.free r m `does_not_contain_addr` x
))
[SMTPat (HS.free r m `does_not_contain_addr` x)]
val does_not_contain_addr_elim
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
m `does_not_contain_addr` x /\
HS.frameOf r == fst x /\
HS.as_addr r == snd x
))
(ensures (~ (m `HS.contains` r)))
(** END TODO *)
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_live_addresses
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_addresses false r a) l) h h' /\
(forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x))
))
(ensures (modifies l h h'))
(* A generic way to ensure that a freshly allocated buffer is disjoint from
   any other object, regardless of how the latter's liveness is defined. *)
val loc_not_unused_in (h: HS.mem) : GTot loc
val loc_unused_in (h: HS.mem) : GTot loc
(* Shortcut notations with more handy names *)
let loc_in (l: loc) (h: HS.mem) =
loc_not_unused_in h `loc_includes` l
let loc_not_in (l: loc) (h: HS.mem) =
loc_unused_in h `loc_includes` l
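/// For instance (an illustrative reading), ``loc_addr_of_buffer b `loc_in` h``
/// says that the address of ``b`` is already allocated in ``h``, whereas
/// ``loc_addr_of_buffer b `loc_not_in` h`` says that it is still unused; see
/// ``live_loc_not_unused_in`` and ``unused_in_loc_unused_in`` below.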
val loc_regions_unused_in (h: HS.mem) (rs: Set.set HS.rid) : Lemma
(requires (forall r . Set.mem r rs ==> (~ (HS.live_region h r))))
(ensures (loc_unused_in h `loc_includes` loc_regions false rs))
val loc_unused_in_not_unused_in_disjoint (h: HS.mem) : Lemma
(loc_disjoint (loc_unused_in h) (loc_not_unused_in h))
val not_live_region_loc_not_unused_in_disjoint
(h0: HS.mem)
(r: HS.rid)
: Lemma
(requires (~ (HS.live_region h0 r)))
(ensures (loc_disjoint (loc_region_only false r) (loc_not_unused_in h0)))
let fresh_frame_loc_not_unused_in_disjoint
(h0 h1: HS.mem)
: Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)]
= not_live_region_loc_not_unused_in_disjoint h0 (HS.get_tip h1)
val live_loc_not_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b))
(ensures (loc_not_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (live h b)]
val unused_in_loc_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (unused_in b h))
(ensures (loc_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (unused_in b h)]
val modifies_address_liveness_insensitive_unused_in
(h h' : HS.mem)
: Lemma
(requires (modifies (address_liveness_insensitive_locs) h h'))
(ensures (loc_not_unused_in h' `loc_includes` loc_not_unused_in h /\ loc_unused_in h `loc_includes` loc_unused_in h'))
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_not_unused_in
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (modifies (loc_union (loc_unused_in h) l) h h'))
(ensures (modifies l h h'))
val mreference_live_loc_not_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (h `HS.contains` r))
(ensures (loc_not_unused_in h `loc_includes` loc_freed_mreference r /\ loc_not_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.contains h r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_freed_mreference r)];
]]
val mreference_unused_in_loc_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (loc_unused_in h `loc_includes` loc_freed_mreference r /\ loc_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.unused_in r h)];
[SMTPat (loc_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_unused_in h `loc_includes` loc_freed_mreference r)];
]]
let unused_in_not_unused_in_disjoint_2
(l1 l2 l1' l2': loc)
(h: HS.mem)
: Lemma
(requires (loc_unused_in h `loc_includes` l1 /\ loc_not_unused_in h `loc_includes` l2 /\ l1 `loc_includes` l1' /\ l2 `loc_includes` l2' ))
(ensures (loc_disjoint l1' l2' ))
[SMTPat (loc_disjoint l1' l2'); SMTPat (loc_unused_in h `loc_includes` l1); SMTPat (loc_not_unused_in h `loc_includes` l2)]
= loc_includes_trans (loc_unused_in h) l1 l1' ;
loc_includes_trans (loc_not_unused_in h) l2 l2' ;
loc_unused_in_not_unused_in_disjoint h ;
loc_disjoint_includes (loc_unused_in h) (loc_not_unused_in h) l1' l2'
val modifies_loc_unused_in
(l: loc)
(h1 h2: HS.mem)
(l' : loc)
: Lemma
(requires (
modifies l h1 h2 /\
address_liveness_insensitive_locs `loc_includes` l /\
loc_unused_in h2 `loc_includes` l'
))
(ensures (loc_unused_in h1 `loc_includes` l'))
[SMTPatOr [
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h2 `loc_includes` l')];
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h1 `loc_includes` l')];
]]
/// Shorthand: freshness
let fresh_loc (l: loc) (h h' : HS.mem) : GTot Type0 =
loc_unused_in h `loc_includes` l /\
loc_not_unused_in h' `loc_includes` l
let ralloc_post_fresh_loc (#a:Type) (#rel:Preorder.preorder a) (i: HS.rid) (init:a) (m0: HS.mem)
(x: HST.mreference a rel{HST.is_eternal_region (HS.frameOf x)}) (m1: HS.mem) : Lemma
(requires (HST.ralloc_post i init m0 x m1))
(ensures (fresh_loc (loc_freed_mreference x) m0 m1))
[SMTPat (HST.ralloc_post i init m0 x m1)]
= ()
//AR: this is needed for liveness across fresh_frame
val fresh_frame_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (modifies loc_none h0 h1))
[SMTPat (HS.fresh_frame h0 h1)]
val popped_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.popped h0 h1))
(ensures (modifies (loc_region_only false (HS.get_tip h0)) h0 h1))
[SMTPat (HS.popped h0 h1)]
val modifies_remove_new_locs (l_fresh l_aux l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (fresh_loc l_fresh h1 h2 /\
modifies l_aux h1 h2 /\
l_goal `loc_includes` l_aux /\
modifies (loc_union l_fresh l_goal) h2 h3))
(ensures (modifies l_goal h1 h3))
[SMTPat (fresh_loc l_fresh h1 h2);
SMTPat (modifies l_aux h1 h2);
SMTPat (modifies l_goal h1 h3)]
(*
* AR: this lemma is framing the modifies clause across a fresh frame
* one way to do it would have been to reuse the lemma modifies_remove_new_locs,
* treating the fresh frame as another new location
 * however, the way the library is set up, loc_region in any form cannot be considered
* a fresh loc
* so, we have a special lemma for fresh_frame
*)
let modifies_remove_fresh_frame (h1 h2 h3:HS.mem) (l:loc)
: Lemma (requires (HS.fresh_frame h1 h2 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h2)) l) h2 h3))
(ensures (modifies l h1 h3))
[SMTPat (modifies l h1 h3); SMTPat (HS.fresh_frame h1 h2)]
= loc_regions_unused_in h1 (HS.mod_set (Set.singleton (HS.get_tip h2)));
modifies_only_not_unused_in l h1 h3
/// Legacy shorthands for disjointness and inclusion of buffers
///
let disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_disjoint (loc_buffer b1) (loc_buffer b2)
let includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_includes (loc_buffer b1) (loc_buffer b2) /\
(g_is_null b1 <==> g_is_null b2)
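/// For example (illustrative only), a buffer ``includes`` any of its
/// sub-buffers: whenever ``U32.v i + U32.v len <= length b``, the inclusion
/// ``b `includes` mgsub sub_rel b i len`` follows from
/// ``loc_includes_gsub_buffer_r'`` together with ``gsub_is_null``.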
val disjoint_neq (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (disjoint b1 b2 /\ U32.v (len b1) > 0))
(ensures (~(b1 === b2)))
val empty_disjoint (#t1 #t2: Type) (#rrel1 #rel1: srel t1) (#rrel2 #rel2: srel t2) (b1: mbuffer t1 rrel1 rel1) (b2: mbuffer t2 rrel2 rel2) : Lemma
(requires (length b1 == 0))
(ensures (disjoint b1 b2))
(*
/// The liveness of a sub-buffer follows from the liveness
/// of its enclosing buffer.
val includes_live (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (live h larger ==> live h smaller))
[SMTPatOr [
[SMTPat (includes larger smaller); SMTPat (live h larger)];
[SMTPat (includes larger smaller); SMTPat (live h smaller)];
]]
*)
val includes_frameOf_as_addr (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (g_is_null larger == g_is_null smaller /\ frameOf larger == frameOf smaller /\ as_addr larger == as_addr smaller))
[SMTPat (larger `includes` smaller)]
///
/// Useful shorthands for pointers, or maybe-null pointers
inline_for_extraction
type mpointer (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{length b == 1}
inline_for_extraction
type mpointer_or_null (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{if g_is_null b then True else length b == 1}
unfold
let deref (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (x:mpointer a rrel rel) =
get h x 0
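/// A typical way to use ``deref`` in specifications (illustrative only): a
/// stateful read of a pointer ``p`` can be given the postcondition
/// ``fun h0 r h1 -> h0 == h1 /\ r == deref h0 p``, i.e. the result is the
/// pointer's current contents and the memory is unchanged.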
/// Two pointers having different contents are disjoint. This is valid
/// only for pointers, i.e. buffers of size 1.
val pointer_distinct_sel_disjoint
(#a:Type0) (#rrel1 #rrel2 #rel1 #rel2:srel a)
(b1:mpointer a rrel1 rel1)
(b2:mpointer a rrel2 rel2)
(h:HS.mem)
:Lemma (requires (live h b1 /\ live h b2 /\ get h b1 0 =!= get h b2 0))
(ensures (disjoint b1 b2))
/// The following stateful operations on buffers do not change the
/// memory, but, as required by the C standard, they all require the
/// buffer in question to be live.
/// The nullity test, ``is_null b``, which KaRaMeL compiles to C as ``b == NULL``.
val is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack bool (requires (fun h -> live h b))
(ensures (fun h y h' -> h == h' /\ y == g_is_null b))
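(* An illustrative client (hypothetical, not prescribed by this interface):
   the boolean returned by ``is_null`` can drive a run-time branch, while the
   ghost equality ties it back to ``g_is_null`` for proofs.
let test_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
  : HST.Stack bool (requires (fun h -> live h b))
                   (ensures (fun h r h' -> h == h' /\ r == g_is_null b))
  = is_null b
*)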
/// ``sub b i len`` constructs the sub-buffer of ``b`` starting from
/// offset ``i`` with length ``len``. KaRaMeL extracts this operation as
/// ``b + i`` (or, equivalently, ``&b[i]``.)
val msub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t) (len:Ghost.erased U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i + U32.v (Ghost.reveal len) <= length b /\ compatible_sub b i (Ghost.reveal len) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (Ghost.reveal len)))
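(* An illustrative use (hypothetical helper, shown as a sketch): carving out
   the first ``n`` elements of ``b`` at the same preorder, assuming the caller
   has already established the compatibility of ``rel`` on that sub-range.
let prefix (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
           (n:U32.t{U32.v n <= length b})
  : HST.Stack (mbuffer a rrel rel)
    (requires (fun h -> live h b /\ compatible_sub b 0ul n rel))
    (ensures (fun h y h' -> h == h' /\ y == mgsub rel b 0ul n))
  = msub rel b 0ul (Ghost.hide n)
*)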
/// ``offset b i`` constructs the tail of the buffer ``b`` starting from
/// offset ``i``, i.e. the sub-buffer of ``b`` starting from offset ``i``
/// with length ``U32.sub (len b) i``. KaRaMeL compiles it as ``b + i`` or
/// ``&b[i]``.
///
/// This stateful operation cannot be derived from ``sub``, because the
/// length cannot be computed outside of proofs.
val moffset (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i <= length b /\ compatible_sub b i (U32.sub (len b) i) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (U32.sub (len b) i)))
// goffset
/// ``index b i`` reads the value of ``b`` at offset ``i`` from memory and
/// returns it. KaRaMeL compiles it as b[i].
val index (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (i:U32.t)
:HST.Stack a (requires (fun h -> live h b /\ U32.v i < length b))
(ensures (fun h y h' -> h == h' /\ y == Seq.index (as_seq h b) (U32.v i)))
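(* An illustrative read (hypothetical helper, shown as a sketch): the first
   element of a non-empty buffer, with the functional result stated via ``get``.
let head (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
  : HST.Stack a (requires (fun h -> live h b /\ 0 < length b))
                (ensures (fun h r h' -> h == h' /\ r == get h b 0))
  = index b 0ul
*)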
/// The following stateful operations on buffers modify the memory,
/// and, as usual, require the liveness of the buffer.
/// ``g_upd_seq b s h`` updates the entire buffer `b`'s contents in
/// heap `h` to correspond to the sequence `s`
val g_upd_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (s:Seq.lseq a (length b))
(h:HS.mem{live h b})
:GTot HS.mem
val lemma_g_upd_with_same_seq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b)) (ensures (g_upd_seq b (as_seq h b) h == h))
/// A lemma specifying `g_upd_seq` in terms of its effect on the
/// buffer's underlying sequence
val g_upd_seq_as_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(s:Seq.lseq a (length b))
(h:HS.mem{live h b})
: Lemma (let h' = g_upd_seq b s h in
(Seq.length s > 0 ==> not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
HST.equal_domains h h' /\
as_seq h' b == s)
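/// In particular (an illustrative consequence), for any ``i < length b`` one
/// gets ``get (g_upd_seq b s h) b i == Seq.index s i``, since the resulting
/// contents are exactly ``s``.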
/// ``g_upd b i v h`` updates the buffer `b` in heap `h` at location
/// `i` writing ``v`` there. This is the spec analog of the stateful
/// update `upd` below.
let g_upd (#a:Type0) (#rrel #rel:srel a)
          (b:mbuffer a rrel rel)
          (i:nat{i < length b})
          (v:a)
          (h:HS.mem{live h b})
  : GTot HS.mem
  = g_upd_seq b (Seq.upd (as_seq h b) i v) h
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining the sub-buffer-related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further the clients can attach a preorder with the subbuffer (sub_rel),
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
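/// For instance, taking the whole range, ``mgsub rel b 0ul (len b)``, yields
/// ``b`` itself; see ``gsub_zero_length`` below.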
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
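/// For example, assuming ``length b >= 6``,
/// ``mgsub sub_rel (mgsub sub_rel b 1ul 5ul) 2ul 2ul`` is the same buffer as
/// ``mgsub sub_rel b 3ul 2ul``: the offsets add up and the inner length
/// prevails.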
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers having the same region and address have
/// their elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then the same inclusion holds
/// between their corresponding sets of memory locations.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then the same inclusion holds between their
/// corresponding sets of memory locations.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then the same inclusion holds between their corresponding memory locations.
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
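(* Illustrative sketch (hypothetical client lemma, not part of this interface):
   splitting a buffer at index i yields two disjoint sub-buffers; the proof is
   expected to follow from loc_disjoint_gsub_buffer above via its SMT pattern.
   let split_at_disjoint (#a:Type0) (#rrel #rel:srel a)
     (b:mbuffer a rrel rel) (i:U32.t{U32.v i <= length b})
     : Lemma (loc_disjoint (loc_buffer (mgsub rel b 0ul i))
                           (loc_buffer (mgsub rel b i (len b `U32.sub` i))))
     = ()
*)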
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
* Same as all_disjoint, retaining for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
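(* Illustrative sketch (hypothetical client code): since loc_pairwise_disjoint is
   an unfold over BigOps.pairwise_and, a precondition stated on a literal list of
   locations exposes every pairwise disjointness fact, e.g.
   let use_pairwise_disjoint (#a:Type0) (#rrel #rel:srel a)
     (b1 b2 b3:mbuffer a rrel rel)
     : Lemma (requires (loc_pairwise_disjoint [loc_buffer b1; loc_buffer b2; loc_buffer b3]))
             (ensures  (loc_disjoint (loc_buffer b1) (loc_buffer b3)))
     = ()
*)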
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
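(* Illustrative sketch (hypothetical client lemma): modifies_buffer_elim is the
   framing principle clients rely on, either through its patterns or explicitly:
   let frame_disjoint_buffer (#a:Type0) (#rrel #rel:srel a)
     (b1 b2:mbuffer a rrel rel) (h h':HS.mem)
     : Lemma (requires (live h b2 /\
                        loc_disjoint (loc_buffer b2) (loc_buffer b1) /\
                        modifies (loc_buffer b1) h h'))
             (ensures  (live h' b2 /\ as_seq h b2 == as_seq h' b2))
     = modifies_buffer_elim b2 (loc_buffer b1) h h'
*)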
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
/// If the memory state does not change, then ``modifies s h h`` holds for
/// any set ``s`` of memory locations (in particular, for the empty set ``loc_none``).
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
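(* Illustrative sketch (hypothetical client lemma): a typical use of
   modifies_loc_includes is widening a modifies clause from a sub-buffer to its
   enclosing buffer, with the inclusion supplied by the mgsub patterns.
   let widen_modifies_to_enclosing (#a:Type0) (#rrel #rel:srel a)
     (b:mbuffer a rrel rel) (i n:U32.t) (h h':HS.mem)
     : Lemma (requires (U32.v i + U32.v n <= length b /\
                        modifies (loc_buffer (mgsub rel b i n)) h h'))
             (ensures  (modifies (loc_buffer b) h h'))
     = modifies_loc_includes (loc_buffer b) h h' (loc_buffer (mgsub rel b i n))
*)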
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x
/// Modifies clauses are transitive. This lemma is the most general
/// one.
val modifies_trans
(s12: loc)
(h1 h2: HS.mem)
(s23: loc)
(h3: HS.mem)
: Lemma
(requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
(ensures (modifies (loc_union s12 s23) h1 h3))
let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
= modifies_trans l h1 h2 l_goal h3
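(* Illustrative sketch (hypothetical client lemma): composing modifies clauses
   over successive heaps with modifies_trans.
   let compose_modifies (l1 l2:loc) (h1 h2 h3:HS.mem)
     : Lemma (requires (modifies l1 h1 h2 /\ modifies l2 h2 h3))
             (ensures  (modifies (loc_union l1 l2) h1 h3))
     = modifies_trans l1 h1 h2 l2 h3
*)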
/// Regions that are not live can be removed from sets of memory
/// locations that are modified.
val modifies_only_live_regions
(rs: Set.set HS.rid)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_regions false rs) l) h h' /\
(forall r . Set.mem r rs ==> (~ (HS.live_region h r)))
))
(ensures (modifies l h h'))
/// As a consequence, fresh regions can be removed from modifies
/// clauses.
val no_upd_fresh_region: r:HS.rid -> l:loc -> h0:HS.mem -> h1:HS.mem -> Lemma
(requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1))
(ensures (modifies l h0 h1))
[SMTPat (HS.fresh_region r h0 h1); SMTPat (modifies l h0 h1)]
val new_region_modifies (m0: HS.mem) (r0: HS.rid) (col: option int) : Lemma
(requires (HST.is_eternal_region r0 /\ HS.live_region m0 r0 /\ (None? col \/ HS.is_heap_color (Some?.v col))))
(ensures (
let (_, m1) = HS.new_eternal_region m0 r0 col in
modifies loc_none m0 m1
))
[SMTPat (HS.new_eternal_region m0 r0 col)]
/// Stack discipline: any stack frame (and all its transitively
/// extending regions) that is pushed, modified and popped can be
/// removed from a modifies clause.
/// AR: 01/29/2019: Removing the smt pattern from this lemma.
/// Clients are no longer expected to call it explicitly;
/// if you find that you have to, please raise an issue.
val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
))
/// Compatibility lemmas to rescue modifies clauses specified in the
/// standard F* HyperStack library.
val modifies_loc_regions_intro
(rs: Set.set HS.rid)
(h1 h2: HS.mem)
: Lemma
(requires (HS.modifies rs h1 h2))
(ensures (modifies (loc_regions true rs) h1 h2))
val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2))
/// Modifies clauses for allocating a reference: nothing is
/// modified. (In particular, a modifies clause does not track
/// memory locations that are created.)
val modifies_ralloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(i: HS.rid)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel)
(h' : HS.mem)
: Lemma
(requires (HST.ralloc_post i init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.ralloc_post i init h x h')]
val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')]
/// Modifies clause for freeing a reference: the address is modified.
val modifies_free
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel { HS.is_mm r } )
(m: HS.mem { m `HS.contains` r } )
: Lemma
(modifies (loc_freed_mreference r) m (HS.free r m))
[SMTPat (HS.free r m)]
/// Another compatibility lemma
val modifies_none_modifies
(h1 h2: HS.mem)
: Lemma
(requires (HST.modifies_none h1 h2))
(ensures (modifies loc_none h1 h2))
[SMTPat (HST.modifies_none h1 h2)]
/// Compatibility with HS.upd
val modifies_upd
(#t: Type) (#pre: Preorder.preorder t)
(r: HS.mreference t pre)
(v: t)
(h: HS.mem)
: Lemma
(requires (HS.contains h r))
(ensures (modifies (loc_mreference r) h (HS.upd h r v)))
[SMTPat (HS.upd h r v)]
/// Introduction lemma for modifying loc_buffer_from_to
val modifies_loc_buffer_from_to_intro
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
/// A memory ``h`` does not contain address ``a`` in region ``r``, denoted
/// ``does_not_contain_addr h (r, a)``, only if either region ``r`` is
/// not live, or address ``a`` is unused in region ``r``.
(* BEGIN TODO: move to FStar.Monotonic.HyperStack *)
val does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: GTot Type0
val not_live_region_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (~ (HS.live_region h (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val unused_in_does_not_contain_addr
(h: HS.mem)
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r)))
val addr_unused_in_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val free_does_not_contain_addr
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
HS.is_mm r /\
m `HS.contains` r /\
fst x == HS.frameOf r /\
snd x == HS.as_addr r
))
(ensures (
HS.free r m `does_not_contain_addr` x
))
[SMTPat (HS.free r m `does_not_contain_addr` x)]
val does_not_contain_addr_elim
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
m `does_not_contain_addr` x /\
HS.frameOf r == fst x /\
HS.as_addr r == snd x
))
(ensures (~ (m `HS.contains` r)))
(** END TODO *)
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_live_addresses
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_addresses false r a) l) h h' /\
(forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x))
))
(ensures (modifies l h h'))
(* Generic way to ensure that a buffer just allocated is disjoint from
any other object, however the latter's liveness is defined. *)
val loc_not_unused_in (h: HS.mem) : GTot loc
val loc_unused_in (h: HS.mem) : GTot loc
(* Shortcut notations with more handy names *)
let loc_in (l: loc) (h: HS.mem) =
loc_not_unused_in h `loc_includes` l
let loc_not_in (l: loc) (h: HS.mem) =
loc_unused_in h `loc_includes` l
val loc_regions_unused_in (h: HS.mem) (rs: Set.set HS.rid) : Lemma
(requires (forall r . Set.mem r rs ==> (~ (HS.live_region h r))))
(ensures (loc_unused_in h `loc_includes` loc_regions false rs))
val loc_unused_in_not_unused_in_disjoint (h: HS.mem) : Lemma
(loc_disjoint (loc_unused_in h) (loc_not_unused_in h))
val not_live_region_loc_not_unused_in_disjoint
(h0: HS.mem)
(r: HS.rid)
: Lemma
(requires (~ (HS.live_region h0 r)))
(ensures (loc_disjoint (loc_region_only false r) (loc_not_unused_in h0)))
let fresh_frame_loc_not_unused_in_disjoint
(h0 h1: HS.mem)
: Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)]
= not_live_region_loc_not_unused_in_disjoint h0 (HS.get_tip h1)
val live_loc_not_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b))
(ensures (loc_not_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (live h b)]
val unused_in_loc_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (unused_in b h))
(ensures (loc_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (unused_in b h)]
val modifies_address_liveness_insensitive_unused_in
(h h' : HS.mem)
: Lemma
(requires (modifies (address_liveness_insensitive_locs) h h'))
(ensures (loc_not_unused_in h' `loc_includes` loc_not_unused_in h /\ loc_unused_in h `loc_includes` loc_unused_in h'))
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_not_unused_in
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (modifies (loc_union (loc_unused_in h) l) h h'))
(ensures (modifies l h h'))
val mreference_live_loc_not_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (h `HS.contains` r))
(ensures (loc_not_unused_in h `loc_includes` loc_freed_mreference r /\ loc_not_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.contains h r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_freed_mreference r)];
]]
val mreference_unused_in_loc_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (loc_unused_in h `loc_includes` loc_freed_mreference r /\ loc_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.unused_in r h)];
[SMTPat (loc_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_unused_in h `loc_includes` loc_freed_mreference r)];
]]
let unused_in_not_unused_in_disjoint_2
(l1 l2 l1' l2': loc)
(h: HS.mem)
: Lemma
(requires (loc_unused_in h `loc_includes` l1 /\ loc_not_unused_in h `loc_includes` l2 /\ l1 `loc_includes` l1' /\ l2 `loc_includes` l2' ))
(ensures (loc_disjoint l1' l2' ))
[SMTPat (loc_disjoint l1' l2'); SMTPat (loc_unused_in h `loc_includes` l1); SMTPat (loc_not_unused_in h `loc_includes` l2)]
= loc_includes_trans (loc_unused_in h) l1 l1' ;
loc_includes_trans (loc_not_unused_in h) l2 l2' ;
loc_unused_in_not_unused_in_disjoint h ;
loc_disjoint_includes (loc_unused_in h) (loc_not_unused_in h) l1' l2'
val modifies_loc_unused_in
(l: loc)
(h1 h2: HS.mem)
(l' : loc)
: Lemma
(requires (
modifies l h1 h2 /\
address_liveness_insensitive_locs `loc_includes` l /\
loc_unused_in h2 `loc_includes` l'
))
(ensures (loc_unused_in h1 `loc_includes` l'))
[SMTPatOr [
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h2 `loc_includes` l')];
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h1 `loc_includes` l')];
]]
/// Shorthand: freshness
let fresh_loc (l: loc) (h h' : HS.mem) : GTot Type0 =
loc_unused_in h `loc_includes` l /\
loc_not_unused_in h' `loc_includes` l
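(* Illustrative sketch (hypothetical client lemma): a location that is fresh
   between h and h' is disjoint from anything live in h. The sketch assumes
   fresh_loc unfolds and relies on live_loc_not_unused_in and
   unused_in_not_unused_in_disjoint_2 above.
   let fresh_disjoint_from_live (#a:Type0) (#rrel #rel:srel a)
     (bfresh blive:mbuffer a rrel rel) (h h':HS.mem)
     : Lemma (requires (fresh_loc (loc_addr_of_buffer bfresh) h h' /\ live h blive))
             (ensures  (loc_disjoint (loc_addr_of_buffer bfresh) (loc_addr_of_buffer blive)))
     = unused_in_not_unused_in_disjoint_2
         (loc_addr_of_buffer bfresh) (loc_addr_of_buffer blive)
         (loc_addr_of_buffer bfresh) (loc_addr_of_buffer blive) h
*)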
let ralloc_post_fresh_loc (#a:Type) (#rel:Preorder.preorder a) (i: HS.rid) (init:a) (m0: HS.mem)
(x: HST.mreference a rel{HST.is_eternal_region (HS.frameOf x)}) (m1: HS.mem) : Lemma
(requires (HST.ralloc_post i init m0 x m1))
(ensures (fresh_loc (loc_freed_mreference x) m0 m1))
[SMTPat (HST.ralloc_post i init m0 x m1)]
= ()
//AR: this is needed for liveness across fresh_frame
val fresh_frame_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (modifies loc_none h0 h1))
[SMTPat (HS.fresh_frame h0 h1)]
val popped_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.popped h0 h1))
(ensures (modifies (loc_region_only false (HS.get_tip h0)) h0 h1))
[SMTPat (HS.popped h0 h1)]
val modifies_remove_new_locs (l_fresh l_aux l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (fresh_loc l_fresh h1 h2 /\
modifies l_aux h1 h2 /\
l_goal `loc_includes` l_aux /\
modifies (loc_union l_fresh l_goal) h2 h3))
(ensures (modifies l_goal h1 h3))
[SMTPat (fresh_loc l_fresh h1 h2);
SMTPat (modifies l_aux h1 h2);
SMTPat (modifies l_goal h1 h3)]
(*
 * AR: this lemma frames the modifies clause across a fresh frame.
 * One way to do it would have been to reuse the lemma modifies_remove_new_locs,
 * treating the fresh frame as another new location.
 * However, the way the library is set up, loc_region in any form cannot be
 * considered a fresh loc, so we have a special lemma for fresh_frame.
 *)
let modifies_remove_fresh_frame (h1 h2 h3:HS.mem) (l:loc)
: Lemma (requires (HS.fresh_frame h1 h2 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h2)) l) h2 h3))
(ensures (modifies l h1 h3))
[SMTPat (modifies l h1 h3); SMTPat (HS.fresh_frame h1 h2)]
= loc_regions_unused_in h1 (HS.mod_set (Set.singleton (HS.get_tip h2)));
modifies_only_not_unused_in l h1 h3
/// Legacy shorthands for disjointness and inclusion of buffers
///
let disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_disjoint (loc_buffer b1) (loc_buffer b2)
let includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_includes (loc_buffer b1) (loc_buffer b2) /\
(g_is_null b1 <==> g_is_null b2)
val disjoint_neq (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (disjoint b1 b2 /\ U32.v (len b1) > 0))
(ensures (~(b1 === b2)))
val empty_disjoint (#t1 #t2: Type) (#rrel1 #rel1: srel t1) (#rrel2 #rel2: srel t2) (b1: mbuffer t1 rrel1 rel1) (b2: mbuffer t2 rrel2 rel2) : Lemma
(requires (length b1 == 0))
(ensures (disjoint b1 b2))
(*
/// The liveness of a sub-buffer follows from the liveness
/// of its enclosing buffer.
val includes_live (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (live h larger ==> live h smaller))
[SMTPatOr [
[SMTPat (includes larger smaller); SMTPat (live h larger)];
[SMTPat (includes larger smaller); SMTPat (live h smaller)];
]]
*)
val includes_frameOf_as_addr (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (g_is_null larger == g_is_null smaller /\ frameOf larger == frameOf smaller /\ as_addr larger == as_addr smaller))
[SMTPat (larger `includes` smaller)]
///
/// Useful shorthands for pointers, or maybe-null pointers
inline_for_extraction
type mpointer (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{length b == 1}
inline_for_extraction
type mpointer_or_null (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{if g_is_null b then True else length b == 1} | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | h: FStar.Monotonic.HyperStack.mem -> x: LowStar.Monotonic.Buffer.mpointer a rrel rel -> Prims.GTot a | Prims.GTot | [
"sometrivial"
] | [] | [
"LowStar.Monotonic.Buffer.srel",
"FStar.Monotonic.HyperStack.mem",
"LowStar.Monotonic.Buffer.mpointer",
"LowStar.Monotonic.Buffer.get"
] | [] | false | false | false | false | false | let deref (#a: Type0) (#rrel #rel: srel a) (h: HS.mem) (x: mpointer a rrel rel) =
| get h x 0 | false |
|
LowStar.Monotonic.Buffer.fsti | LowStar.Monotonic.Buffer.freeable_disjoint' | val freeable_disjoint'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
: Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (loc_disjoint (loc_addr_of_buffer b1) (loc_addr_of_buffer b2)))
[SMTPat (freeable b1); SMTPat (disjoint b1 b2)] | val freeable_disjoint'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
: Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (loc_disjoint (loc_addr_of_buffer b1) (loc_addr_of_buffer b2)))
[SMTPat (freeable b1); SMTPat (disjoint b1 b2)] | let freeable_disjoint' (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (loc_disjoint (loc_addr_of_buffer b1) (loc_addr_of_buffer b2)))
[SMTPat (freeable b1); SMTPat (disjoint b1 b2)]
= freeable_disjoint b1 b2 | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 27,
"end_line": 2052,
"start_col": 0,
"start_line": 2047
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
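(* Illustrative sketch (hypothetical): ``get`` is just sequence indexing into
   ``as_seq``, so pointwise facts about a buffer can be stated either way.
   let get_is_index (#a:Type0) (#rrel #rel:srel a)
     (h:HS.mem) (b:mbuffer a rrel rel) (i:nat{i < length b})
     : Lemma (get h b i == Seq.index (as_seq h b) i)
     = ()
*)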
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further the clients can attach a preorder with the subbuffer (sub_rel),
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
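(* Illustrative sketch (hypothetical): nested sub-buffers collapse; e.g. taking
   elements [2, 5) of the sub-buffer that starts at offset 3 is the same as
   taking elements [5, 8) of the original buffer.
   let gsub_gsub_example (#a:Type0) (#rrel #rel:srel a)
     (b:mbuffer a rrel rel{10 <= length b})
     : Lemma (mgsub rel (mgsub rel b 3ul 6ul) 2ul 3ul == mgsub rel b 5ul 3ul)
     = ()
*)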
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
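(* Illustrative sketch (hypothetical client lemma): reading an element of a
   sub-buffer through its enclosing buffer; as_seq_gsub above is expected to
   discharge it, here invoked explicitly.
   let get_gsub (#a:Type0) (#rrel #rel:srel a)
     (h:HS.mem) (b:mbuffer a rrel rel) (i n:U32.t) (sub_rel:srel a)
     (j:nat{U32.v i + U32.v n <= length b /\ j < U32.v n})
     : Lemma (get h (mgsub sub_rel b i n) j == get h b (U32.v i + j))
     = as_seq_gsub h b i n sub_rel
*)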
/// Two live non-null buffers with the same region and address have
/// elements of the same type (and the same initial preorder).
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then so are their
/// corresponding sets of memory locations.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then the set of memory locations corresponding to
/// ``s1`` includes the one corresponding to ``s2``.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``, then
/// the set of memory locations corresponding to ``s1`` includes that of ``s2``
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
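(* Illustrative sketch, not part of this interface; kept as a comment. A common use of
   `loc_disjoint_includes`: two buffers whose footprints sit inside two disjoint
   locations are themselves disjoint. The lemma name is purely for illustration. *)
(*
let example_disjoint_buffers
  (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
  (l1 l2:loc) (b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
  : Lemma (requires (loc_disjoint l1 l2 /\
                     loc_includes l1 (loc_buffer b1) /\
                     loc_includes l2 (loc_buffer b2)))
          (ensures  (loc_disjoint (loc_buffer b1) (loc_buffer b2)))
  = loc_disjoint_includes l1 l2 (loc_buffer b1) (loc_buffer b2)
*)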
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
 * Same as all_disjoint, retained for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
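(* Illustrative sketch, not part of this interface; kept as a comment. Since
   `all_disjoint` / `loc_pairwise_disjoint` are marked `unfold`, applying them to a
   literal list normalizes at typechecking time to the conjunction of pairwise
   `loc_disjoint` facts, so the proof below is by normalization alone. *)
(*
let example_pairwise_disjoint (l1 l2 l3: loc)
  : Lemma (requires (loc_pairwise_disjoint [l1; l2; l3]))
          (ensures  (loc_disjoint l1 l2 /\ loc_disjoint l1 l3 /\ loc_disjoint l2 l3))
  = ()
*)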
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
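(* Illustrative sketch, not part of this interface; kept as a comment. The elimination
   lemma above is the workhorse of framing: a buffer disjoint from the modified
   footprint keeps both its liveness and its contents. *)
(*
let example_frame_buffer (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel) (l:loc) (h0 h1:HS.mem)
  : Lemma (requires (live h0 b /\ loc_disjoint (loc_buffer b) l /\ modifies l h0 h1))
          (ensures  (live h1 b /\ as_seq h0 b == as_seq h1 b))
  = modifies_buffer_elim b l h0 h1
*)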
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
/// If the memory state does not change, then any set of memory locations
/// is trivially modified (in particular, the empty set ``loc_none``).
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x
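(* Illustrative sketch, not part of this interface; kept as a comment. Because buffer
   footprints are address-liveness-insensitive, modifying the contents of one buffer
   never deallocates another live buffer, even without any disjointness hypothesis. *)
(*
let example_liveness_preserved (#a:Type0) (#rrel #rel:srel a)
  (b b':mbuffer a rrel rel) (h0 h1:HS.mem)
  : Lemma (requires (live h0 b /\ modifies (loc_buffer b') h0 h1))
          (ensures  (live h1 b))
  = address_liveness_insensitive_buffer b';
    modifies_liveness_insensitive_buffer_weak (loc_buffer b') h0 h1 b
*)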
/// Modifies clauses are transitive. This lemma is the most general
/// one.
val modifies_trans
(s12: loc)
(h1 h2: HS.mem)
(s23: loc)
(h3: HS.mem)
: Lemma
(requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
(ensures (modifies (loc_union s12 s23) h1 h3))
let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
= modifies_trans l h1 h2 l_goal h3
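(* Illustrative sketch, not part of this interface; kept as a comment. Footprints
   accumulate across sequential composition (`modifies_trans`) and can then be
   weakened to any enclosing location (`modifies_loc_includes`). *)
(*
let example_modifies_chain (l1 l2 l:loc) (h0 h1 h2:HS.mem)
  : Lemma (requires (modifies l1 h0 h1 /\ modifies l2 h1 h2 /\
                     loc_includes l l1 /\ loc_includes l l2))
          (ensures  (modifies l h0 h2))
  = modifies_trans l1 h0 h1 l2 h2;
    loc_includes_union_r l l1 l2;
    modifies_loc_includes l h0 h2 (loc_union l1 l2)
*)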
/// Regions that are not live can be removed from sets of memory
/// locations that are modified.
val modifies_only_live_regions
(rs: Set.set HS.rid)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_regions false rs) l) h h' /\
(forall r . Set.mem r rs ==> (~ (HS.live_region h r)))
))
(ensures (modifies l h h'))
/// As a consequence, fresh regions can be removed from modifies
/// clauses.
val no_upd_fresh_region: r:HS.rid -> l:loc -> h0:HS.mem -> h1:HS.mem -> Lemma
(requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1))
(ensures (modifies l h0 h1))
[SMTPat (HS.fresh_region r h0 h1); SMTPat (modifies l h0 h1)]
val new_region_modifies (m0: HS.mem) (r0: HS.rid) (col: option int) : Lemma
(requires (HST.is_eternal_region r0 /\ HS.live_region m0 r0 /\ (None? col \/ HS.is_heap_color (Some?.v col))))
(ensures (
let (_, m1) = HS.new_eternal_region m0 r0 col in
modifies loc_none m0 m1
))
[SMTPat (HS.new_eternal_region m0 r0 col)]
/// Stack discipline: any stack frame (and all its transitively
/// extending regions) that is pushed, modified and popped can be
/// removed from a modifies clause.
/// AR: 01/29/2019: Removing the smt pattern from this lemma.
/// Clients are no longer expected to call it explicitly,
/// if you find yourself having to, please raise an issue.
val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
))
/// Compatibility lemmas to rescue modifies clauses specified in the
/// standard F* HyperStack library.
val modifies_loc_regions_intro
(rs: Set.set HS.rid)
(h1 h2: HS.mem)
: Lemma
(requires (HS.modifies rs h1 h2))
(ensures (modifies (loc_regions true rs) h1 h2))
val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2))
/// Modifies clauses for allocating a reference: nothing is
/// modified. (In particular, a modifies clause does not track
/// memory locations that are created.)
val modifies_ralloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(i: HS.rid)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel)
(h' : HS.mem)
: Lemma
(requires (HST.ralloc_post i init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.ralloc_post i init h x h')]
val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')]
/// Modifies clause for freeing a reference: the address is modified.
val modifies_free
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel { HS.is_mm r } )
(m: HS.mem { m `HS.contains` r } )
: Lemma
(modifies (loc_freed_mreference r) m (HS.free r m))
[SMTPat (HS.free r m)]
/// Another compatibility lemma
val modifies_none_modifies
(h1 h2: HS.mem)
: Lemma
(requires (HST.modifies_none h1 h2))
(ensures (modifies loc_none h1 h2))
[SMTPat (HST.modifies_none h1 h2)]
/// Compatibility with HS.upd
val modifies_upd
(#t: Type) (#pre: Preorder.preorder t)
(r: HS.mreference t pre)
(v: t)
(h: HS.mem)
: Lemma
(requires (HS.contains h r))
(ensures (modifies (loc_mreference r) h (HS.upd h r v)))
[SMTPat (HS.upd h r v)]
/// Introduction lemma for modifying loc_buffer_from_to
val modifies_loc_buffer_from_to_intro
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
/// A memory ``h`` does not contain address ``a`` in region ``r``, denoted
/// ``does_not_contain_addr h (r, a)``, only if either region ``r`` is
/// not live or address ``a`` is unused in region ``r``.
(* BEGIN TODO: move to FStar.Monotonic.HyperStack *)
val does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: GTot Type0
val not_live_region_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (~ (HS.live_region h (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val unused_in_does_not_contain_addr
(h: HS.mem)
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r)))
val addr_unused_in_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val free_does_not_contain_addr
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
HS.is_mm r /\
m `HS.contains` r /\
fst x == HS.frameOf r /\
snd x == HS.as_addr r
))
(ensures (
HS.free r m `does_not_contain_addr` x
))
[SMTPat (HS.free r m `does_not_contain_addr` x)]
val does_not_contain_addr_elim
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
m `does_not_contain_addr` x /\
HS.frameOf r == fst x /\
HS.as_addr r == snd x
))
(ensures (~ (m `HS.contains` r)))
(** END TODO *)
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_live_addresses
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_addresses false r a) l) h h' /\
(forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x))
))
(ensures (modifies l h h'))
(* Generic way to ensure that a buffer just allocated is disjoint from
any other object, however the latter's liveness is defined. *)
val loc_not_unused_in (h: HS.mem) : GTot loc
val loc_unused_in (h: HS.mem) : GTot loc
(* Shortcut notations with more handy names *)
let loc_in (l: loc) (h: HS.mem) =
loc_not_unused_in h `loc_includes` l
let loc_not_in (l: loc) (h: HS.mem) =
loc_unused_in h `loc_includes` l
val loc_regions_unused_in (h: HS.mem) (rs: Set.set HS.rid) : Lemma
(requires (forall r . Set.mem r rs ==> (~ (HS.live_region h r))))
(ensures (loc_unused_in h `loc_includes` loc_regions false rs))
val loc_unused_in_not_unused_in_disjoint (h: HS.mem) : Lemma
(loc_disjoint (loc_unused_in h) (loc_not_unused_in h))
val not_live_region_loc_not_unused_in_disjoint
(h0: HS.mem)
(r: HS.rid)
: Lemma
(requires (~ (HS.live_region h0 r)))
(ensures (loc_disjoint (loc_region_only false r) (loc_not_unused_in h0)))
let fresh_frame_loc_not_unused_in_disjoint
(h0 h1: HS.mem)
: Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)]
= not_live_region_loc_not_unused_in_disjoint h0 (HS.get_tip h1)
val live_loc_not_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b))
(ensures (loc_not_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (live h b)]
val unused_in_loc_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (unused_in b h))
(ensures (loc_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (unused_in b h)]
val modifies_address_liveness_insensitive_unused_in
(h h' : HS.mem)
: Lemma
(requires (modifies (address_liveness_insensitive_locs) h h'))
(ensures (loc_not_unused_in h' `loc_includes` loc_not_unused_in h /\ loc_unused_in h `loc_includes` loc_unused_in h'))
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_not_unused_in
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (modifies (loc_union (loc_unused_in h) l) h h'))
(ensures (modifies l h h'))
val mreference_live_loc_not_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (h `HS.contains` r))
(ensures (loc_not_unused_in h `loc_includes` loc_freed_mreference r /\ loc_not_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.contains h r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_freed_mreference r)];
]]
val mreference_unused_in_loc_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (loc_unused_in h `loc_includes` loc_freed_mreference r /\ loc_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.unused_in r h)];
[SMTPat (loc_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_unused_in h `loc_includes` loc_freed_mreference r)];
]]
let unused_in_not_unused_in_disjoint_2
(l1 l2 l1' l2': loc)
(h: HS.mem)
: Lemma
(requires (loc_unused_in h `loc_includes` l1 /\ loc_not_unused_in h `loc_includes` l2 /\ l1 `loc_includes` l1' /\ l2 `loc_includes` l2' ))
(ensures (loc_disjoint l1' l2' ))
[SMTPat (loc_disjoint l1' l2'); SMTPat (loc_unused_in h `loc_includes` l1); SMTPat (loc_not_unused_in h `loc_includes` l2)]
= loc_includes_trans (loc_unused_in h) l1 l1' ;
loc_includes_trans (loc_not_unused_in h) l2 l2' ;
loc_unused_in_not_unused_in_disjoint h ;
loc_disjoint_includes (loc_unused_in h) (loc_not_unused_in h) l1' l2'
val modifies_loc_unused_in
(l: loc)
(h1 h2: HS.mem)
(l' : loc)
: Lemma
(requires (
modifies l h1 h2 /\
address_liveness_insensitive_locs `loc_includes` l /\
loc_unused_in h2 `loc_includes` l'
))
(ensures (loc_unused_in h1 `loc_includes` l'))
[SMTPatOr [
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h2 `loc_includes` l')];
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h1 `loc_includes` l')];
]]
/// Shorthand: freshness
let fresh_loc (l: loc) (h h' : HS.mem) : GTot Type0 =
loc_unused_in h `loc_includes` l /\
loc_not_unused_in h' `loc_includes` l
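(* Illustrative sketch, not part of this interface; kept as a comment. A location that
   is fresh between `h` and `h'` is disjoint from anything already allocated in `h`. *)
(*
let example_fresh_disjoint (l l':loc) (h h':HS.mem)
  : Lemma (requires (fresh_loc l h h' /\ l' `loc_in` h))
          (ensures  (loc_disjoint l l'))
  = loc_includes_refl (loc_unused_in h);
    loc_includes_refl (loc_not_unused_in h);
    unused_in_not_unused_in_disjoint_2 (loc_unused_in h) (loc_not_unused_in h) l l' h
*)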
let ralloc_post_fresh_loc (#a:Type) (#rel:Preorder.preorder a) (i: HS.rid) (init:a) (m0: HS.mem)
(x: HST.mreference a rel{HST.is_eternal_region (HS.frameOf x)}) (m1: HS.mem) : Lemma
(requires (HST.ralloc_post i init m0 x m1))
(ensures (fresh_loc (loc_freed_mreference x) m0 m1))
[SMTPat (HST.ralloc_post i init m0 x m1)]
= ()
//AR: this is needed for liveness across fresh_frame
val fresh_frame_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (modifies loc_none h0 h1))
[SMTPat (HS.fresh_frame h0 h1)]
val popped_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.popped h0 h1))
(ensures (modifies (loc_region_only false (HS.get_tip h0)) h0 h1))
[SMTPat (HS.popped h0 h1)]
val modifies_remove_new_locs (l_fresh l_aux l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (fresh_loc l_fresh h1 h2 /\
modifies l_aux h1 h2 /\
l_goal `loc_includes` l_aux /\
modifies (loc_union l_fresh l_goal) h2 h3))
(ensures (modifies l_goal h1 h3))
[SMTPat (fresh_loc l_fresh h1 h2);
SMTPat (modifies l_aux h1 h2);
SMTPat (modifies l_goal h1 h3)]
(*
* AR: this lemma is framing the modifies clause across a fresh frame
* one way to do it would have been to reuse the lemma modifies_remove_new_locs,
* treating the fresh frame as another new location
 * however, the way the library is set up, loc_region in any form cannot be considered
* a fresh loc
* so, we have a special lemma for fresh_frame
*)
let modifies_remove_fresh_frame (h1 h2 h3:HS.mem) (l:loc)
: Lemma (requires (HS.fresh_frame h1 h2 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h2)) l) h2 h3))
(ensures (modifies l h1 h3))
[SMTPat (modifies l h1 h3); SMTPat (HS.fresh_frame h1 h2)]
= loc_regions_unused_in h1 (HS.mod_set (Set.singleton (HS.get_tip h2)));
modifies_only_not_unused_in l h1 h3
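(* Illustrative sketch, not part of this interface; kept as a comment, and only a
   sketch: with `fresh_frame_modifies`, `popped_modifies` and
   `modifies_remove_fresh_frame` in scope, the automation typically discharges the
   canonical bracketing below, where an empty push/pop pair has a `loc_none`
   footprint; a real function body's footprint would take the place of `loc_none`. *)
(*
let example_empty_frame ()
  : HST.Stack unit (requires (fun _ -> True))
                   (ensures  (fun h0 _ h1 -> modifies loc_none h0 h1))
  = HST.push_frame ();
    HST.pop_frame ()
*)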
/// Legacy shorthands for disjointness and inclusion of buffers
///
let disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_disjoint (loc_buffer b1) (loc_buffer b2)
let includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_includes (loc_buffer b1) (loc_buffer b2) /\
(g_is_null b1 <==> g_is_null b2)
val disjoint_neq (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (disjoint b1 b2 /\ U32.v (len b1) > 0))
(ensures (~(b1 === b2)))
val empty_disjoint (#t1 #t2: Type) (#rrel1 #rel1: srel t1) (#rrel2 #rel2: srel t2) (b1: mbuffer t1 rrel1 rel1) (b2: mbuffer t2 rrel2 rel2) : Lemma
(requires (length b1 == 0))
(ensures (disjoint b1 b2))
(*
/// The liveness of a sub-buffer follows from the liveness
/// of its enclosing buffer.
val includes_live (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (live h larger ==> live h smaller))
[SMTPatOr [
[SMTPat (includes larger smaller); SMTPat (live h larger)];
[SMTPat (includes larger smaller); SMTPat (live h smaller)];
]]
*)
val includes_frameOf_as_addr (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (g_is_null larger == g_is_null smaller /\ frameOf larger == frameOf smaller /\ as_addr larger == as_addr smaller))
[SMTPat (larger `includes` smaller)]
///
/// Useful shorthands for pointers, or maybe-null pointers
inline_for_extraction
type mpointer (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{length b == 1}
inline_for_extraction
type mpointer_or_null (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{if g_is_null b then True else length b == 1}
unfold
let deref (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (x:mpointer a rrel rel) =
get h x 0
/// Two pointers having different contents are disjoint. This is valid
/// only for pointers, i.e. buffers of size 1.
val pointer_distinct_sel_disjoint
(#a:Type0) (#rrel1 #rrel2 #rel1 #rel2:srel a)
(b1:mpointer a rrel1 rel1)
(b2:mpointer a rrel2 rel2)
(h:HS.mem)
:Lemma (requires (live h b1 /\ live h b2 /\ get h b1 0 =!= get h b2 0))
(ensures (disjoint b1 b2))
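(* Illustrative sketch, not part of this interface; kept as a comment. It restates the
   lemma above using the `deref` shorthand: two live pointers holding different values
   must be disjoint. *)
(*
let example_deref_distinct_disjoint (#a:Type0) (#rrel1 #rrel2 #rel1 #rel2:srel a)
  (p1:mpointer a rrel1 rel1) (p2:mpointer a rrel2 rel2) (h:HS.mem)
  : Lemma (requires (live h p1 /\ live h p2 /\ deref h p1 =!= deref h p2))
          (ensures  (disjoint p1 p2))
  = pointer_distinct_sel_disjoint p1 p2 h
*)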
/// The following stateful operations on buffers do not change the
/// memory, but, as required by the C standard, they all require the
/// buffer in question to be live.
/// The nullity test, ``is_null b``, which KaRaMeL compiles to C as ``b == NULL``.
val is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack bool (requires (fun h -> live h b))
(ensures (fun h y h' -> h == h' /\ y == g_is_null b))
/// ``sub b i len`` constructs the sub-buffer of ``b`` starting from
/// offset ``i`` with length ``len``. KaRaMeL extracts this operation as
/// ``b + i`` (or, equivalently, ``&b[i]``.)
val msub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t) (len:Ghost.erased U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i + U32.v (Ghost.reveal len) <= length b /\ compatible_sub b i (Ghost.reveal len) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (Ghost.reveal len)))
/// ``offset b i`` constructs the tail of the buffer ``b`` starting from
/// offset ``i``, i.e. the sub-buffer of ``b`` starting from offset ``i``
/// with length ``U32.sub (len b) i``. KaRaMeL compiles it as ``b + i`` or
/// ``&b[i]``.
///
/// This stateful operation cannot be derived from ``sub``, because the
/// length cannot be computed outside of proofs.
val moffset (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i <= length b /\ compatible_sub b i (U32.sub (len b) i) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (U32.sub (len b) i)))
// goffset
/// ``index b i`` reads the value of ``b`` at offset ``i`` from memory and
/// returns it. KaRaMeL compiles it as b[i].
val index (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (i:U32.t)
:HST.Stack a (requires (fun h -> live h b /\ U32.v i < length b))
(ensures (fun h y h' -> h == h' /\ y == Seq.index (as_seq h b) (U32.v i)))
/// The following stateful operations on buffers modify the memory,
/// and, as usual, require the liveness of the buffer.
/// ``g_upd_seq b s h`` updates the entire buffer `b`'s contents in
/// heap `h` to correspond to the sequence `s`
val g_upd_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (s:Seq.lseq a (length b))
(h:HS.mem{live h b})
:GTot HS.mem
val lemma_g_upd_with_same_seq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b)) (ensures (g_upd_seq b (as_seq h b) h == h))
/// A lemma specifying `g_upd_seq` in terms of its effect on the
/// buffer's underlying sequence
val g_upd_seq_as_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(s:Seq.lseq a (length b))
(h:HS.mem{live h b})
: Lemma (let h' = g_upd_seq b s h in
(Seq.length s > 0 ==> not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
HST.equal_domains h h' /\
as_seq h' b == s)
/// ``g_upd b i v h`` updates the buffer `b` in heap `h` at location
/// `i` writing ``v`` there. This is the spec analog of the stateful
/// update `upd` below.
let g_upd (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: GTot HS.mem
= g_upd_seq b (Seq.upd (as_seq h b) i v) h
val g_upd_modifies_strong (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: Lemma (modifies (loc_buffer_from_to b (U32.uint_to_t i) (U32.uint_to_t (i + 1))) h (g_upd b i v h))
/// ``upd b i v`` writes ``v`` to the memory, at offset ``i`` of
/// buffer ``b``. KaRaMeL compiles it as ``b[i] = v``.
val upd'
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
:HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> h' == g_upd b (U32.v i) v h))
inline_for_extraction
let upd
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
: HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> (not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
as_seq h' b == Seq.upd (as_seq h b) (U32.v i) v))
= let h = HST.get () in
upd' b i v;
g_upd_seq_as_seq b (Seq.upd (as_seq h b) (U32.v i) v) h
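(* Illustrative sketch, not part of this interface (the name `example_set_first` is
   purely for illustration); kept as a comment. Note how the caller must establish
   that the write is allowed by the buffer's preorder `rel`. *)
(*
let example_set_first (#rrel #rel:srel bool) (b:mbuffer bool rrel rel)
  : HST.Stack unit
    (requires (fun h -> live h b /\ 0 < length b /\
                      rel (as_seq h b) (Seq.upd (as_seq h b) 0 true)))
    (ensures  (fun h0 _ h1 -> modifies (loc_buffer b) h0 h1 /\ live h1 b /\
                            as_seq h1 b == Seq.upd (as_seq h0 b) 0 true))
  = upd b 0ul true
*)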
(* FIXME: Comment on `recall` *)
val recallable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val region_lifetime_buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Type0
(*
* A functoriality lemma
*)
unfold
let rrel_rel_always_compatible (#a:Type0) (rrel rel:srel a) =
forall (len:nat) (i:nat) (j:nat{i <= j /\ j <= len}). compatible_subseq_preorder len rrel i j rel
val region_lifetime_sub (#a:Type0) (#rrel #rel #subrel:srel a)
(b0:mbuffer a rrel rel)
(b1:mbuffer a rrel subrel)
: Lemma
(requires rrel_rel_always_compatible rrel subrel)
(ensures
(region_lifetime_buf b0 /\
(exists i len. U32.v i + U32.v len <= length b0 /\ b1 == mgsub subrel b0 i len)) ==> region_lifetime_buf b1)
val recallable_null (#a:Type0) (#rrel #rel:srel a)
:Lemma (recallable (mnull #a #rrel #rel)) [SMTPat (recallable (mnull #a #rrel #rel))]
(*
val recallable_includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (recallable larger <==> recallable smaller))
[SMTPatOr [
[SMTPat (recallable larger); SMTPat (recallable smaller);];
[SMTPat (larger `includes` smaller)];
]]
*)
val recallable_mgsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel /\ recallable b))
(ensures (recallable (mgsub sub_rel b i len)))
[SMTPatOr [
[SMTPat (recallable (mgsub sub_rel b i len))];
[SMTPat (recallable b); SMTPat (mgsub sub_rel b i len);]
]]
val recall (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack unit (requires (fun m -> recallable b \/ (region_lifetime_buf b /\ HS.live_region m (frameOf b))))
(ensures (fun m0 _ m1 -> m0 == m1 /\ live m1 b))
(*
* Begin: API for general witness and recall
* Clients can witness predicates on the contents of the buffer, and later recall them
* Provided the predicates are stable w.r.t. the buffer preorder
*)
(* Shorthand for predicates of Seq.seq a *)
unfold let spred (a:Type0) = Seq.seq a -> Type0
(*
 * Note the tight patterns on the quantifier; you may need to write additional triggers
* if you are directly working with them
*)
unfold let stable_on (#a:Type0) (p:spred a) (rel:srel a) =
forall (s1 s2:Seq.seq a).{:pattern (p s1); (rel s1 s2); (p s2)} (p s1 /\ rel s1 s2) ==> p s2
(* Clients get this pure token when they witness a predicate *)
val witnessed (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a) :Type0
(*
* We can only support witness and recall for gc-malloced buffers (i.e. recallable ones)
* This is not a fundamental limitation, but needs some tweaks to the underlying state model
*)
val witness_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.ST unit (requires (fun h0 -> p (as_seq h0 b) /\ p `stable_on` rel))
(ensures (fun h0 _ h1 -> h0 == h1 /\ b `witnessed` p))
val recall_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.ST unit (requires (fun h0 -> (recallable b \/ live h0 b) /\ b `witnessed` p))
(ensures (fun h0 _ h1 -> h0 == h1 /\ live h0 b /\ p (as_seq h0 b)))
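(* Illustrative sketch, not part of this interface (the names below are purely for
   illustration); kept as a comment. Any predicate that is stable under `rel` can be
   witnessed and later recalled; the trivially-true predicate is stable under every
   preorder. *)
(*
let example_trivial_pred (#a:Type0) : spred a = fun _ -> True

let example_witness_trivial (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
  : HST.ST unit (requires (fun _ -> True))
                (ensures  (fun h0 _ h1 -> h0 == h1 /\ b `witnessed` example_trivial_pred))
  = witness_p b example_trivial_pred
*)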
val witnessed_functorial (#a:Type0)
(#rrel #rel1 #rel2:srel a)
(b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t)
(s1 s2:spred a)
: Lemma
(requires
rrel_rel_always_compatible rrel rel1 /\ //e.g. trivial_preorder, immutable preorder etc.
U32.v i + U32.v len <= length b1 /\
b2 == mgsub rel2 b1 i len /\ //the underlying allocation unit for b1 and b2 must be the same
witnessed b1 s1 /\
(forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2)))
(ensures witnessed b2 s2)
(*
* A stateful version that relaxes the rrel and rel compatibility
* but requires liveness of b1
*)
val witnessed_functorial_st (#a:Type0)
(#rrel #rel1 #rel2:srel a)
(b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t)
(s1 s2:spred a)
: HST.Stack unit
(requires fun h ->
live h b1 /\
U32.v i + U32.v len <= length b1 /\
b2 == mgsub rel2 b1 i len /\
witnessed b1 s1 /\
(forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2)))
(ensures fun h0 _ h1 -> h0 == h1 /\ witnessed b2 s2)
(* End: API for general witness and recall *)
/// Deallocation. A buffer that was allocated by ``malloc`` (see below)
/// can be ``free``'d.
val freeable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val free (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.ST unit (requires (fun h0 -> live h0 b /\ freeable b))
(ensures (fun h0 _ h1 -> (not (g_is_null b)) /\
Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\
(HS.get_tip h1) == (HS.get_tip h0) /\
modifies (loc_addr_of_buffer b) h0 h1 /\
HS.live_region h1 (frameOf b)))
val freeable_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (freeable b)) (ensures (length b > 0))
[SMTPat (freeable b)]
val freeable_disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (frameOf b1 <> frameOf b2 \/ as_addr b1 <> as_addr b2)) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
b1: LowStar.Monotonic.Buffer.mbuffer a1 rrel1 rel1 ->
b2: LowStar.Monotonic.Buffer.mbuffer a2 rrel2 rel2
-> FStar.Pervasives.Lemma
(requires
LowStar.Monotonic.Buffer.freeable b1 /\ LowStar.Monotonic.Buffer.length b2 > 0 /\
LowStar.Monotonic.Buffer.disjoint b1 b2)
(ensures
LowStar.Monotonic.Buffer.loc_disjoint (LowStar.Monotonic.Buffer.loc_addr_of_buffer b1)
(LowStar.Monotonic.Buffer.loc_addr_of_buffer b2))
[
SMTPat (LowStar.Monotonic.Buffer.freeable b1);
SMTPat (LowStar.Monotonic.Buffer.disjoint b1 b2)
] | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Monotonic.Buffer.freeable_disjoint",
"Prims.unit",
"Prims.l_and",
"LowStar.Monotonic.Buffer.freeable",
"Prims.b2t",
"Prims.op_GreaterThan",
"LowStar.Monotonic.Buffer.length",
"LowStar.Monotonic.Buffer.disjoint",
"Prims.squash",
"LowStar.Monotonic.Buffer.loc_disjoint",
"LowStar.Monotonic.Buffer.loc_addr_of_buffer",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat",
"Prims.Nil"
] | [] | true | false | true | false | false | let freeable_disjoint'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
: Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (loc_disjoint (loc_addr_of_buffer b1) (loc_addr_of_buffer b2)))
[SMTPat (freeable b1); SMTPat (disjoint b1 b2)] =
| freeable_disjoint b1 b2 | false |
LowStar.Monotonic.Buffer.fsti | LowStar.Monotonic.Buffer.upd | val upd (#a: Type0) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (i: U32.t) (v: a)
: HST.Stack unit
(requires
(fun h ->
live h b /\ U32.v i < length b /\ rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures
(fun h _ h' ->
(not (g_is_null b)) /\ modifies (loc_buffer b) h h' /\ live h' b /\
as_seq h' b == Seq.upd (as_seq h b) (U32.v i) v)) | val upd (#a: Type0) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (i: U32.t) (v: a)
: HST.Stack unit
(requires
(fun h ->
live h b /\ U32.v i < length b /\ rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures
(fun h _ h' ->
(not (g_is_null b)) /\ modifies (loc_buffer b) h h' /\ live h' b /\
as_seq h' b == Seq.upd (as_seq h b) (U32.v i) v)) | let upd
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
: HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> (not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
as_seq h' b == Seq.upd (as_seq h b) (U32.v i) v))
= let h = HST.get () in
upd' b i v;
g_upd_seq_as_seq b (Seq.upd (as_seq h b) (U32.v i) v) h | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 59,
"end_line": 1909,
"start_col": 0,
"start_line": 1896
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
 * Most of the time, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers,
/// but we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
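/// As a purely illustrative sketch (hypothetical, not part of this
/// interface), a proof may refer to the first half of a buffer ``b`` of
/// even length by reusing ``b``'s own preorder for the sub-buffer:
(*
   let g_first_half (#a:Type0) (#rrel #rel:srel a)
     (b:mbuffer a rrel rel { length b % 2 = 0 })
     : GTot (mbuffer a rrel rel)
     = mgsub rel b 0ul (U32.uint_to_t (length b / 2))
*)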
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers having the same region and address have
/// their elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
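/// For instance, these laws let clients normalize nested unions; the
/// following illustrative lemma (not part of this interface) follows from
/// commutativity, associativity and idempotence alone:
(*
   let loc_union_norm_example (l1 l2: loc)
     : Lemma (loc_union l1 (loc_union l2 l1) == loc_union l1 l2)
     = loc_union_comm l2 l1;
       loc_union_assoc l1 l1 l2;
       loc_union_idem l1
*)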
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then the corresponding set of
/// memory locations of ``b1`` includes that of ``b2``.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then the corresponding set of memory locations of
/// ``s1`` includes that of ``s2``.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then the corresponding set of memory locations of ``s1`` includes that of ``s2``.
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
* Same as all_disjoint, retaining for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
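/// As a hypothetical spec fragment (illustrative only), a function over
/// three buffers ``b1``, ``b2``, ``b3`` might state its liveness and
/// separation preconditions as:
(*
   requires (fun h ->
     all_live h [buf b1; buf b2; buf b3] /\
     all_disjoint [loc_buffer b1; loc_buffer b2; loc_buffer b3])
*)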
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if,
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
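/// As an illustration of this elimination principle, the following sketch
/// (not part of this interface) is expected to be discharged automatically
/// by the patterns above:
(*
   let frame_example (#a:Type0) (#rrel #rel:srel a)
     (b b':mbuffer a rrel rel) (h h':HS.mem)
     : Lemma (requires (live h b' /\
                        loc_disjoint (loc_buffer b') (loc_buffer b) /\
                        modifies (loc_buffer b) h h'))
             (ensures  (live h' b' /\ as_seq h' b' == as_seq h b'))
     = ()
*)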
/// If the memory state does not change, then any set of memory locations
/// is trivially modified (in particular, the empty set ``loc_none``).
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
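/// For instance (illustrative only), a ``modifies (loc_buffer b)``
/// postcondition can always be weakened to mention additional locations:
(*
   let widen_example (#a:Type0) (#rrel #rel:srel a)
     (b:mbuffer a rrel rel) (l:loc) (h h':HS.mem)
     : Lemma (requires (modifies (loc_buffer b) h h'))
             (ensures  (modifies (loc_union l (loc_buffer b)) h h'))
     = loc_includes_union_l l (loc_buffer b) (loc_buffer b);
       modifies_loc_includes (loc_union l (loc_buffer b)) h h' (loc_buffer b)
*)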
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x
/// Modifies clauses are transitive. This lemma is the most general
/// one.
val modifies_trans
(s12: loc)
(h1 h2: HS.mem)
(s23: loc)
(h3: HS.mem)
: Lemma
(requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
(ensures (modifies (loc_union s12 s23) h1 h3))
let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
= modifies_trans l h1 h2 l_goal h3
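/// For instance (illustrative only), chaining two steps that modify the
/// same set ``l`` yields ``modifies l`` overall, since ``loc_union l l``
/// collapses to ``l`` by idempotence:
(*
   let chain_example (l:loc) (h0 h1 h2:HS.mem)
     : Lemma (requires (modifies l h0 h1 /\ modifies l h1 h2))
             (ensures  (modifies l h0 h2))
     = modifies_trans l h0 h1 l h2
*)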
/// Regions that are not live can be removed from sets of memory
/// locations that are modified.
val modifies_only_live_regions
(rs: Set.set HS.rid)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_regions false rs) l) h h' /\
(forall r . Set.mem r rs ==> (~ (HS.live_region h r)))
))
(ensures (modifies l h h'))
/// As a consequence, fresh regions can be removed from modifies
/// clauses.
val no_upd_fresh_region: r:HS.rid -> l:loc -> h0:HS.mem -> h1:HS.mem -> Lemma
(requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1))
(ensures (modifies l h0 h1))
[SMTPat (HS.fresh_region r h0 h1); SMTPat (modifies l h0 h1)]
val new_region_modifies (m0: HS.mem) (r0: HS.rid) (col: option int) : Lemma
(requires (HST.is_eternal_region r0 /\ HS.live_region m0 r0 /\ (None? col \/ HS.is_heap_color (Some?.v col))))
(ensures (
let (_, m1) = HS.new_eternal_region m0 r0 col in
modifies loc_none m0 m1
))
[SMTPat (HS.new_eternal_region m0 r0 col)]
/// Stack discipline: any stack frame (and all its transitively
/// extending regions) that is pushed, modified and popped can be
/// removed from a modifies clause.
/// AR: 01/29/2019: Removing the smt pattern from this lemma.
/// Clients are no longer expected to call it explicitly,
/// if you are having to, please raise an issue.
val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
))
/// Compatibility lemmas to rescue modifies clauses specified in the
/// standard F* HyperStack library.
val modifies_loc_regions_intro
(rs: Set.set HS.rid)
(h1 h2: HS.mem)
: Lemma
(requires (HS.modifies rs h1 h2))
(ensures (modifies (loc_regions true rs) h1 h2))
val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2))
/// Modifies clauses for allocating a reference: nothing is
/// modified. (In particular, a modifies clause does not track
/// memory locations that are created.)
val modifies_ralloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(i: HS.rid)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel)
(h' : HS.mem)
: Lemma
(requires (HST.ralloc_post i init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.ralloc_post i init h x h')]
val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')]
/// Modifies clause for freeing a reference: the address is modified.
val modifies_free
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel { HS.is_mm r } )
(m: HS.mem { m `HS.contains` r } )
: Lemma
(modifies (loc_freed_mreference r) m (HS.free r m))
[SMTPat (HS.free r m)]
/// Another compatibility lemma
val modifies_none_modifies
(h1 h2: HS.mem)
: Lemma
(requires (HST.modifies_none h1 h2))
(ensures (modifies loc_none h1 h2))
[SMTPat (HST.modifies_none h1 h2)]
/// Compatibility with HS.upd
val modifies_upd
(#t: Type) (#pre: Preorder.preorder t)
(r: HS.mreference t pre)
(v: t)
(h: HS.mem)
: Lemma
(requires (HS.contains h r))
(ensures (modifies (loc_mreference r) h (HS.upd h r v)))
[SMTPat (HS.upd h r v)]
/// Introduction lemma for modifying loc_buffer_from_to
val modifies_loc_buffer_from_to_intro
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
/// A memory ``h`` does not contain address ``a`` in region ``r``, denoted
/// ``does_not_contain_addr h (r, a)``, only if, either region ``r`` is
/// not live, or address ``a`` is unused in region ``r``.
(* BEGIN TODO: move to FStar.Monotonic.HyperStack *)
val does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: GTot Type0
val not_live_region_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (~ (HS.live_region h (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val unused_in_does_not_contain_addr
(h: HS.mem)
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r)))
val addr_unused_in_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val free_does_not_contain_addr
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
HS.is_mm r /\
m `HS.contains` r /\
fst x == HS.frameOf r /\
snd x == HS.as_addr r
))
(ensures (
HS.free r m `does_not_contain_addr` x
))
[SMTPat (HS.free r m `does_not_contain_addr` x)]
val does_not_contain_addr_elim
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
m `does_not_contain_addr` x /\
HS.frameOf r == fst x /\
HS.as_addr r == snd x
))
(ensures (~ (m `HS.contains` r)))
(** END TODO *)
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_live_addresses
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_addresses false r a) l) h h' /\
(forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x))
))
(ensures (modifies l h h'))
(* Generic way to ensure that a buffer just allocated is disjoint from
any other object, however the latter's liveness is defined. *)
val loc_not_unused_in (h: HS.mem) : GTot loc
val loc_unused_in (h: HS.mem) : GTot loc
(* Shortcut notations with more handy names *)
let loc_in (l: loc) (h: HS.mem) =
loc_not_unused_in h `loc_includes` l
let loc_not_in (l: loc) (h: HS.mem) =
loc_unused_in h `loc_includes` l
val loc_regions_unused_in (h: HS.mem) (rs: Set.set HS.rid) : Lemma
(requires (forall r . Set.mem r rs ==> (~ (HS.live_region h r))))
(ensures (loc_unused_in h `loc_includes` loc_regions false rs))
val loc_unused_in_not_unused_in_disjoint (h: HS.mem) : Lemma
(loc_disjoint (loc_unused_in h) (loc_not_unused_in h))
val not_live_region_loc_not_unused_in_disjoint
(h0: HS.mem)
(r: HS.rid)
: Lemma
(requires (~ (HS.live_region h0 r)))
(ensures (loc_disjoint (loc_region_only false r) (loc_not_unused_in h0)))
let fresh_frame_loc_not_unused_in_disjoint
(h0 h1: HS.mem)
: Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)]
= not_live_region_loc_not_unused_in_disjoint h0 (HS.get_tip h1)
val live_loc_not_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b))
(ensures (loc_not_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (live h b)]
val unused_in_loc_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (unused_in b h))
(ensures (loc_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (unused_in b h)]
val modifies_address_liveness_insensitive_unused_in
(h h' : HS.mem)
: Lemma
(requires (modifies (address_liveness_insensitive_locs) h h'))
(ensures (loc_not_unused_in h' `loc_includes` loc_not_unused_in h /\ loc_unused_in h `loc_includes` loc_unused_in h'))
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_not_unused_in
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (modifies (loc_union (loc_unused_in h) l) h h'))
(ensures (modifies l h h'))
val mreference_live_loc_not_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (h `HS.contains` r))
(ensures (loc_not_unused_in h `loc_includes` loc_freed_mreference r /\ loc_not_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.contains h r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_freed_mreference r)];
]]
val mreference_unused_in_loc_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (loc_unused_in h `loc_includes` loc_freed_mreference r /\ loc_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.unused_in r h)];
[SMTPat (loc_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_unused_in h `loc_includes` loc_freed_mreference r)];
]]
let unused_in_not_unused_in_disjoint_2
(l1 l2 l1' l2': loc)
(h: HS.mem)
: Lemma
(requires (loc_unused_in h `loc_includes` l1 /\ loc_not_unused_in h `loc_includes` l2 /\ l1 `loc_includes` l1' /\ l2 `loc_includes` l2' ))
(ensures (loc_disjoint l1' l2' ))
[SMTPat (loc_disjoint l1' l2'); SMTPat (loc_unused_in h `loc_includes` l1); SMTPat (loc_not_unused_in h `loc_includes` l2)]
= loc_includes_trans (loc_unused_in h) l1 l1' ;
loc_includes_trans (loc_not_unused_in h) l2 l2' ;
loc_unused_in_not_unused_in_disjoint h ;
loc_disjoint_includes (loc_unused_in h) (loc_not_unused_in h) l1' l2'
val modifies_loc_unused_in
(l: loc)
(h1 h2: HS.mem)
(l' : loc)
: Lemma
(requires (
modifies l h1 h2 /\
address_liveness_insensitive_locs `loc_includes` l /\
loc_unused_in h2 `loc_includes` l'
))
(ensures (loc_unused_in h1 `loc_includes` l'))
[SMTPatOr [
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h2 `loc_includes` l')];
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h1 `loc_includes` l')];
]]
/// Shorthand: freshness
let fresh_loc (l: loc) (h h' : HS.mem) : GTot Type0 =
loc_unused_in h `loc_includes` l /\
loc_not_unused_in h' `loc_includes` l
let ralloc_post_fresh_loc (#a:Type) (#rel:Preorder.preorder a) (i: HS.rid) (init:a) (m0: HS.mem)
(x: HST.mreference a rel{HST.is_eternal_region (HS.frameOf x)}) (m1: HS.mem) : Lemma
(requires (HST.ralloc_post i init m0 x m1))
(ensures (fresh_loc (loc_freed_mreference x) m0 m1))
[SMTPat (HST.ralloc_post i init m0 x m1)]
= ()
//AR: this is needed for liveness across fresh_frame
val fresh_frame_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (modifies loc_none h0 h1))
[SMTPat (HS.fresh_frame h0 h1)]
val popped_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.popped h0 h1))
(ensures (modifies (loc_region_only false (HS.get_tip h0)) h0 h1))
[SMTPat (HS.popped h0 h1)]
val modifies_remove_new_locs (l_fresh l_aux l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (fresh_loc l_fresh h1 h2 /\
modifies l_aux h1 h2 /\
l_goal `loc_includes` l_aux /\
modifies (loc_union l_fresh l_goal) h2 h3))
(ensures (modifies l_goal h1 h3))
[SMTPat (fresh_loc l_fresh h1 h2);
SMTPat (modifies l_aux h1 h2);
SMTPat (modifies l_goal h1 h3)]
(*
* AR: this lemma is framing the modifies clause across a fresh frame
* one way to do it would have been to reuse the lemma modifies_remove_new_locs,
* treating the fresh frame as another new location
 * however, the way the library is set up, loc_region in any form cannot be considered
* a fresh loc
* so, we have a special lemma for fresh_frame
*)
let modifies_remove_fresh_frame (h1 h2 h3:HS.mem) (l:loc)
: Lemma (requires (HS.fresh_frame h1 h2 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h2)) l) h2 h3))
(ensures (modifies l h1 h3))
[SMTPat (modifies l h1 h3); SMTPat (HS.fresh_frame h1 h2)]
= loc_regions_unused_in h1 (HS.mod_set (Set.singleton (HS.get_tip h2)));
modifies_only_not_unused_in l h1 h3
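/// Put together, these lemmas support the usual shape of a ``Stack``
/// function that pushes a frame, works in it, and pops it. The following
/// minimal sketch (not part of this interface, and relying on the SMT
/// patterns above to verify) leaves the heap unmodified:
(*
   let with_frame () : HST.Stack unit
     (requires (fun _ -> True))
     (ensures  (fun h0 _ h1 -> modifies loc_none h0 h1))
     = HST.push_frame ();
       HST.pop_frame ()
*)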
/// Legacy shorthands for disjointness and inclusion of buffers
///
let disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_disjoint (loc_buffer b1) (loc_buffer b2)
let includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_includes (loc_buffer b1) (loc_buffer b2) /\
(g_is_null b1 <==> g_is_null b2)
val disjoint_neq (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (disjoint b1 b2 /\ U32.v (len b1) > 0))
(ensures (~(b1 === b2)))
val empty_disjoint (#t1 #t2: Type) (#rrel1 #rel1: srel t1) (#rrel2 #rel2: srel t2) (b1: mbuffer t1 rrel1 rel1) (b2: mbuffer t2 rrel2 rel2) : Lemma
(requires (length b1 == 0))
(ensures (disjoint b1 b2))
(*
/// The liveness of a sub-buffer follows from the liveness
/// of its enclosing buffer.
val includes_live (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (live h larger ==> live h smaller))
[SMTPatOr [
[SMTPat (includes larger smaller); SMTPat (live h larger)];
[SMTPat (includes larger smaller); SMTPat (live h smaller)];
]]
*)
val includes_frameOf_as_addr (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (g_is_null larger == g_is_null smaller /\ frameOf larger == frameOf smaller /\ as_addr larger == as_addr smaller))
[SMTPat (larger `includes` smaller)]
///
/// Useful shorthands for pointers, or maybe-null pointers
inline_for_extraction
type mpointer (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{length b == 1}
inline_for_extraction
type mpointer_or_null (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{if g_is_null b then True else length b == 1}
unfold
let deref (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (x:mpointer a rrel rel) =
get h x 0
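/// For instance (a hypothetical signature, not part of this interface),
/// reading a pointer can be specified as returning ``deref h p``; such a
/// read would simply be implemented as ``index p 0ul`` (see below):
(*
   val read (#a:Type0) (#rrel #rel:srel a) (p:mpointer a rrel rel)
     : HST.Stack a (requires (fun h -> live h p))
                   (ensures  (fun h y h' -> h == h' /\ y == deref h p))
*)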
/// Two pointers having different contents are disjoint. This is valid
/// only for pointers, i.e. buffers of size 1.
val pointer_distinct_sel_disjoint
(#a:Type0) (#rrel1 #rrel2 #rel1 #rel2:srel a)
(b1:mpointer a rrel1 rel1)
(b2:mpointer a rrel2 rel2)
(h:HS.mem)
:Lemma (requires (live h b1 /\ live h b2 /\ get h b1 0 =!= get h b2 0))
(ensures (disjoint b1 b2))
/// The following stateful operations on buffers do not change the
/// memory, but, as required by the C standard, they all require the
/// buffer in question to be live.
/// The nullity test, ``is_null b``, which KaRaMeL compiles to C as ``b == NULL``.
val is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack bool (requires (fun h -> live h b))
(ensures (fun h y h' -> h == h' /\ y == g_is_null b))
/// ``sub b i len`` constructs the sub-buffer of ``b`` starting from
/// offset ``i`` with length ``len``. KaRaMeL extracts this operation as
/// ``b + i`` (or, equivalently, ``&b[i]``.)
val msub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t) (len:Ghost.erased U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i + U32.v (Ghost.reveal len) <= length b /\ compatible_sub b i (Ghost.reveal len) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (Ghost.reveal len)))
/// ``offset b i`` constructs the tail of the buffer ``b`` starting from
/// offset ``i``, i.e. the sub-buffer of ``b`` starting from offset ``i``
/// with length ``U32.sub (len b) i``. KaRaMeL compiles it as ``b + i`` or
/// ``&b[i]``.
///
/// This stateful operation cannot be derived from ``sub``, because the
/// length cannot be computed outside of proofs.
val moffset (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i <= length b /\ compatible_sub b i (U32.sub (len b) i) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (U32.sub (len b) i)))
// goffset
/// ``index b i`` reads the value of ``b`` at offset ``i`` from memory and
/// returns it. KaRaMeL compiles it as b[i].
val index (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (i:U32.t)
:HST.Stack a (requires (fun h -> live h b /\ U32.v i < length b))
(ensures (fun h y h' -> h == h' /\ y == Seq.index (as_seq h b) (U32.v i)))
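(* Illustrative sketch, not part of the interface above: a bounds-checked read of the
   first element of a non-empty live buffer, specified against ``as_seq``. The name
   `read_first` is hypothetical. *)
let read_first (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
  :HST.Stack a (requires (fun h -> live h b /\ 0 < length b))
               (ensures (fun h y h' -> h == h' /\ y == Seq.index (as_seq h b) 0))
  = index b 0ul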
/// The following stateful operations on buffers modify the memory,
/// and, as usual, require the liveness of the buffer.
/// ``g_upd_seq b s h`` updates the entire buffer `b`'s contents in
/// heap `h` to correspond to the sequence `s`
val g_upd_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (s:Seq.lseq a (length b))
(h:HS.mem{live h b})
:GTot HS.mem
val lemma_g_upd_with_same_seq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b)) (ensures (g_upd_seq b (as_seq h b) h == h))
/// A lemma specifying `g_upd_seq` in terms of its effect on the
/// buffer's underlying sequence
val g_upd_seq_as_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(s:Seq.lseq a (length b))
(h:HS.mem{live h b})
: Lemma (let h' = g_upd_seq b s h in
(Seq.length s > 0 ==> not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
HST.equal_domains h h' /\
as_seq h' b == s)
/// ``g_upd b i v h`` updates the buffer `b` in heap `h` at location
/// `i` writing ``v`` there. This is the spec analog of the stateful
/// update `upd` below.
let g_upd (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: GTot HS.mem
= g_upd_seq b (Seq.upd (as_seq h b) i v) h
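(* Illustrative sketch, not part of the interface above: the sequence-level reading of
   ``g_upd``, obtained by instantiating `g_upd_seq_as_seq`. The name
   `g_upd_as_seq_example` is hypothetical. *)
let g_upd_as_seq_example (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel) (i:nat{i < length b}) (v:a) (h:HS.mem{live h b})
  :Lemma (as_seq (g_upd b i v h) b == Seq.upd (as_seq h b) i v)
  = g_upd_seq_as_seq b (Seq.upd (as_seq h b) i v) h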
val g_upd_modifies_strong (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: Lemma (modifies (loc_buffer_from_to b (U32.uint_to_t i) (U32.uint_to_t (i + 1))) h (g_upd b i v h))
/// ``upd b i v`` writes ``v`` to the memory, at offset ``i`` of
/// buffer ``b``. KaRaMeL compiles it as ``b[i] = v``.
val upd'
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
:HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> h' == g_upd b (U32.v i) v h)) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | b: LowStar.Monotonic.Buffer.mbuffer a rrel rel -> i: FStar.UInt32.t -> v: a
-> FStar.HyperStack.ST.Stack Prims.unit | FStar.HyperStack.ST.Stack | [] | [] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"FStar.UInt32.t",
"LowStar.Monotonic.Buffer.g_upd_seq_as_seq",
"FStar.Seq.Base.upd",
"LowStar.Monotonic.Buffer.as_seq",
"FStar.UInt32.v",
"Prims.unit",
"LowStar.Monotonic.Buffer.upd'",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get",
"Prims.l_and",
"LowStar.Monotonic.Buffer.live",
"Prims.b2t",
"Prims.op_LessThan",
"LowStar.Monotonic.Buffer.length",
"Prims.op_Negation",
"LowStar.Monotonic.Buffer.g_is_null",
"LowStar.Monotonic.Buffer.modifies",
"LowStar.Monotonic.Buffer.loc_buffer",
"Prims.eq2",
"FStar.Seq.Base.seq"
] | [] | false | true | false | false | false | let upd (#a: Type0) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (i: U32.t) (v: a)
: HST.Stack unit
(requires
(fun h ->
live h b /\ U32.v i < length b /\ rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures
(fun h _ h' ->
(not (g_is_null b)) /\ modifies (loc_buffer b) h h' /\ live h' b /\
as_seq h' b == Seq.upd (as_seq h b) (U32.v i) v)) =
| let h = HST.get () in
upd' b i v;
g_upd_seq_as_seq b (Seq.upd (as_seq h b) (U32.v i) v) h | false |
Pulse.Checker.Exists.fsti | Pulse.Checker.Exists.intro_exists_witness_singleton | val intro_exists_witness_singleton : st: Pulse.Syntax.Base.st_term -> Prims.bool | let intro_exists_witness_singleton (st:st_term) =
match st.term with
| Tm_IntroExists { witnesses = [_] } -> true
| _ -> false | {
"file_name": "lib/steel/pulse/Pulse.Checker.Exists.fsti",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 14,
"end_line": 37,
"start_col": 0,
"start_line": 34
} | (*
Copyright 2023 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Pulse.Checker.Exists
module T = FStar.Tactics.V2
open Pulse.Syntax
open Pulse.Typing
open Pulse.Checker.Base
val check_elim_exists
(g:env)
(pre:term)
(pre_typing:tot_typing g pre tm_vprop)
(post_hint:post_hint_opt g)
(res_ppname:ppname)
(t:st_term{Tm_ElimExists? t.term})
: T.Tac (checker_result_t g pre post_hint) | {
"checked_file": "/",
"dependencies": [
"Pulse.Typing.fst.checked",
"Pulse.Syntax.fst.checked",
"Pulse.Checker.Base.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Pulse.Checker.Exists.fsti"
} | [
{
"abbrev": false,
"full_module": "Pulse.Checker.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": false,
"full_module": "Pulse.Checker",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | st: Pulse.Syntax.Base.st_term -> Prims.bool | Prims.Tot | [
"total"
] | [] | [
"Pulse.Syntax.Base.st_term",
"Pulse.Syntax.Base.__proj__Mkst_term__item__term",
"Pulse.Syntax.Base.vprop",
"Pulse.Syntax.Base.term",
"Pulse.Syntax.Base.st_term'",
"Prims.bool"
] | [] | false | false | false | true | false | let intro_exists_witness_singleton (st: st_term) =
| match st.term with
| Tm_IntroExists { witnesses = [_] } -> true
| _ -> false | false |
|
LowStar.Monotonic.Buffer.fsti | LowStar.Monotonic.Buffer.mgcmalloc_of_list_partial | val mgcmalloc_of_list_partial (#a: Type0) (#rrel: srel a) (r: HS.rid) (init: list a)
: HST.ST
(b: lmbuffer_or_null a rrel rrel (normalize_term (List.Tot.length init)) r {recallable b})
(requires (fun _ -> gcmalloc_of_list_pre r init))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.seq_of_list init))) | val mgcmalloc_of_list_partial (#a: Type0) (#rrel: srel a) (r: HS.rid) (init: list a)
: HST.ST
(b: lmbuffer_or_null a rrel rrel (normalize_term (List.Tot.length init)) r {recallable b})
(requires (fun _ -> gcmalloc_of_list_pre r init))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.seq_of_list init))) | let mgcmalloc_of_list_partial (#a:Type0) (#rrel:srel a) (r:HS.rid) (init:list a)
:HST.ST (b:lmbuffer_or_null a rrel rrel (normalize_term (List.Tot.length init)) r{recallable b})
(requires (fun _ -> gcmalloc_of_list_pre r init))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.seq_of_list init)))
= mgcmalloc_of_list r init | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 28,
"end_line": 2272,
"start_col": 0,
"start_line": 2267
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq h b``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
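(* Illustrative sketch, not part of the interface above: ``get`` is definitionally the
   ``Seq.index`` of ``as_seq``, so proofs can move freely between the two views. The
   name `get_is_index` is hypothetical. *)
let get_is_index (#a:Type0) (#rrel #rel:srel a)
  (h:HS.mem) (b:mbuffer a rrel rel) (i:nat{i < length b})
  :Lemma (get h b i == Seq.index (as_seq h b) i)
  = ()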
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
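(* Illustrative sketch, not part of the interface above: instantiating `as_seq_gsub`
   at a concrete offset and length; the bound `3 <= length b` and the name
   `as_seq_gsub_example` are hypothetical. *)
let as_seq_gsub_example (#a:Type0) (#rrel #rel:srel a)
  (h:HS.mem) (b:mbuffer a rrel rel{3 <= length b})
  :Lemma (as_seq h (mgsub rel b 1ul 2ul) == Seq.slice (as_seq h b) 1 3)
  = ()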
/// Two live non-null buffers with the same region and address have
/// elements of the same type (and the same initial preorder ``rrel``).
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
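(* Illustrative sketch, not part of the interface above: the unit and idempotence laws
   let unions of locations be normalized for free. The name `loc_union_unit_example`
   is hypothetical. *)
let loc_union_unit_example (l1 l2: loc)
  :Lemma (loc_union (loc_union l1 loc_none) l2 == loc_union l1 l2)
  = ()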
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
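(* Illustrative sketch, not part of the interface above: a union of locations includes
   each of its operands. The name `loc_includes_union_example` is hypothetical. *)
let loc_includes_union_example (l1 l2: loc)
  :Lemma (loc_includes (loc_union l1 l2) l1)
  = loc_includes_refl l1;
    loc_includes_union_l l1 l2 l1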
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then so do their
/// corresponding sets of memory locations.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region ``r`` of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then so do their corresponding sets of memory
/// locations.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then so do their corresponding sets of memory locations
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
* Same as all_disjoint, retaining for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
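(* Illustrative sketch, not part of the interface above: a typical spec-level
   liveness/footprint precondition over three hypothetical buffers, written with the
   list utilities above. *)
let three_buffers_pre (#a:Type0) (#rrel #rel:srel a)
  (b1 b2 b3:mbuffer a rrel rel) (h:HS.mem) :Type0
  = all_live h [buf b1; buf b2; buf b3] /\
    all_disjoint [loc_buffer b1; loc_buffer b2; loc_buffer b3]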
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
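(* Illustrative sketch, not part of the interface above: the standard framing step, a
   buffer disjoint from the modified footprint keeps its liveness and contents. The
   name `frame_buffer_example` is hypothetical. *)
let frame_buffer_example (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel) (l:loc) (h0 h1:HS.mem)
  :Lemma (requires (live h0 b /\ loc_disjoint (loc_buffer b) l /\ modifies l h0 h1))
         (ensures (live h1 b /\ as_seq h0 b == as_seq h1 b))
  = modifies_buffer_elim b l h0 h1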
/// If the memory state does not change, then any memory location is
/// modified (and, in particular, the empty set, ``loc_none``.)
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
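(* Illustrative sketch, not part of the interface above: when the modified footprint is
   below `address_liveness_insensitive_locs` (here, a single buffer), liveness of any
   other buffer is preserved without a disjointness argument. The name
   `liveness_insensitive_example` is hypothetical. *)
let liveness_insensitive_example (#a:Type0) (#rrel #rel:srel a)
  (b x:mbuffer a rrel rel) (h0 h1:HS.mem)
  :Lemma (requires (modifies (loc_buffer b) h0 h1 /\ live h0 x))
         (ensures (live h1 x))
  = address_liveness_insensitive_buffer b;
    modifies_liveness_insensitive_buffer_weak (loc_buffer b) h0 h1 x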
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x
/// Modifies clauses are transitive. This lemma is the most general
/// one.
val modifies_trans
(s12: loc)
(h1 h2: HS.mem)
(s23: loc)
(h3: HS.mem)
: Lemma
(requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
(ensures (modifies (loc_union s12 s23) h1 h3))
let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
= modifies_trans l h1 h2 l_goal h3
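(*
 * Example (illustrative sketch only, not part of this interface): the linear
 * variant above, together with its SMT pattern, is what lets a caller chain
 * two stateful calls whose footprints fit inside a common goal without naming
 * the intermediate heap. Here `f` and `g` are assumed functions that each
 * satisfy `modifies (loc_buffer b)`, and `triv` abbreviates the trivial
 * preorder `fun _ _ -> True`:
 *
 * let twice (b:mbuffer U32.t triv triv)
 * : HST.Stack unit
 * (requires fun h -> live h b)
 * (ensures fun h _ h' -> modifies (loc_buffer b) h h')
 * = f b;  // modifies (loc_buffer b) h0 h1
 *   g b   // modifies (loc_buffer b) h1 h2, hence modifies (loc_buffer b) h0 h2
 *)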
/// Regions that are not live can be removed from sets of memory
/// locations that are modified.
val modifies_only_live_regions
(rs: Set.set HS.rid)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_regions false rs) l) h h' /\
(forall r . Set.mem r rs ==> (~ (HS.live_region h r)))
))
(ensures (modifies l h h'))
/// As a consequence, fresh regions can be removed from modifies
/// clauses.
val no_upd_fresh_region: r:HS.rid -> l:loc -> h0:HS.mem -> h1:HS.mem -> Lemma
(requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1))
(ensures (modifies l h0 h1))
[SMTPat (HS.fresh_region r h0 h1); SMTPat (modifies l h0 h1)]
val new_region_modifies (m0: HS.mem) (r0: HS.rid) (col: option int) : Lemma
(requires (HST.is_eternal_region r0 /\ HS.live_region m0 r0 /\ (None? col \/ HS.is_heap_color (Some?.v col))))
(ensures (
let (_, m1) = HS.new_eternal_region m0 r0 col in
modifies loc_none m0 m1
))
[SMTPat (HS.new_eternal_region m0 r0 col)]
/// Stack discipline: any stack frame (and all its transitively
/// extending regions) that is pushed, modified and popped can be
/// removed from a modifies clause.
/// AR: 01/29/2019: Removing the smt pattern from this lemma.
/// Clients are no longer expected to call it explicitly;
/// if you find yourself having to, please raise an issue.
val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
))
/// Compatibility lemmas to rescue modifies clauses specified in the
/// standard F* HyperStack library.
val modifies_loc_regions_intro
(rs: Set.set HS.rid)
(h1 h2: HS.mem)
: Lemma
(requires (HS.modifies rs h1 h2))
(ensures (modifies (loc_regions true rs) h1 h2))
val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2))
/// Modifies clauses for allocating a reference: nothing is
/// modified. (In particular, a modifies clause does not track
/// memory locations that are created.)
val modifies_ralloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(i: HS.rid)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel)
(h' : HS.mem)
: Lemma
(requires (HST.ralloc_post i init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.ralloc_post i init h x h')]
val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')]
/// Modifies clause for freeing a reference: the address is modified.
val modifies_free
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel { HS.is_mm r } )
(m: HS.mem { m `HS.contains` r } )
: Lemma
(modifies (loc_freed_mreference r) m (HS.free r m))
[SMTPat (HS.free r m)]
/// Another compatibility lemma
val modifies_none_modifies
(h1 h2: HS.mem)
: Lemma
(requires (HST.modifies_none h1 h2))
(ensures (modifies loc_none h1 h2))
[SMTPat (HST.modifies_none h1 h2)]
/// Compatibility with HS.upd
val modifies_upd
(#t: Type) (#pre: Preorder.preorder t)
(r: HS.mreference t pre)
(v: t)
(h: HS.mem)
: Lemma
(requires (HS.contains h r))
(ensures (modifies (loc_mreference r) h (HS.upd h r v)))
[SMTPat (HS.upd h r v)]
/// Introduction lemma for modifying loc_buffer_from_to
val modifies_loc_buffer_from_to_intro
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
/// A memory ``h`` does not contain address ``a`` in region ``r``, denoted
/// ``does_not_contain_addr h (r, a)``, only if either region ``r`` is
/// not live, or address ``a`` is unused in region ``r``.
(* BEGIN TODO: move to FStar.Monotonic.HyperStack *)
val does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: GTot Type0
val not_live_region_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (~ (HS.live_region h (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val unused_in_does_not_contain_addr
(h: HS.mem)
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r)))
val addr_unused_in_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val free_does_not_contain_addr
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
HS.is_mm r /\
m `HS.contains` r /\
fst x == HS.frameOf r /\
snd x == HS.as_addr r
))
(ensures (
HS.free r m `does_not_contain_addr` x
))
[SMTPat (HS.free r m `does_not_contain_addr` x)]
val does_not_contain_addr_elim
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
m `does_not_contain_addr` x /\
HS.frameOf r == fst x /\
HS.as_addr r == snd x
))
(ensures (~ (m `HS.contains` r)))
(** END TODO *)
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_live_addresses
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_addresses false r a) l) h h' /\
(forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x))
))
(ensures (modifies l h h'))
(* Generic way to ensure that a buffer just allocated is disjoint from
any other object, however the latter's liveness is defined. *)
val loc_not_unused_in (h: HS.mem) : GTot loc
val loc_unused_in (h: HS.mem) : GTot loc
(* Shortcut notations with more handy names *)
let loc_in (l: loc) (h: HS.mem) =
loc_not_unused_in h `loc_includes` l
let loc_not_in (l: loc) (h: HS.mem) =
loc_unused_in h `loc_includes` l
val loc_regions_unused_in (h: HS.mem) (rs: Set.set HS.rid) : Lemma
(requires (forall r . Set.mem r rs ==> (~ (HS.live_region h r))))
(ensures (loc_unused_in h `loc_includes` loc_regions false rs))
val loc_unused_in_not_unused_in_disjoint (h: HS.mem) : Lemma
(loc_disjoint (loc_unused_in h) (loc_not_unused_in h))
val not_live_region_loc_not_unused_in_disjoint
(h0: HS.mem)
(r: HS.rid)
: Lemma
(requires (~ (HS.live_region h0 r)))
(ensures (loc_disjoint (loc_region_only false r) (loc_not_unused_in h0)))
let fresh_frame_loc_not_unused_in_disjoint
(h0 h1: HS.mem)
: Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)]
= not_live_region_loc_not_unused_in_disjoint h0 (HS.get_tip h1)
val live_loc_not_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b))
(ensures (loc_not_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (live h b)]
val unused_in_loc_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (unused_in b h))
(ensures (loc_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (unused_in b h)]
val modifies_address_liveness_insensitive_unused_in
(h h' : HS.mem)
: Lemma
(requires (modifies (address_liveness_insensitive_locs) h h'))
(ensures (loc_not_unused_in h' `loc_includes` loc_not_unused_in h /\ loc_unused_in h `loc_includes` loc_unused_in h'))
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_not_unused_in
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (modifies (loc_union (loc_unused_in h) l) h h'))
(ensures (modifies l h h'))
val mreference_live_loc_not_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (h `HS.contains` r))
(ensures (loc_not_unused_in h `loc_includes` loc_freed_mreference r /\ loc_not_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.contains h r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_freed_mreference r)];
]]
val mreference_unused_in_loc_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (loc_unused_in h `loc_includes` loc_freed_mreference r /\ loc_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.unused_in r h)];
[SMTPat (loc_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_unused_in h `loc_includes` loc_freed_mreference r)];
]]
let unused_in_not_unused_in_disjoint_2
(l1 l2 l1' l2': loc)
(h: HS.mem)
: Lemma
(requires (loc_unused_in h `loc_includes` l1 /\ loc_not_unused_in h `loc_includes` l2 /\ l1 `loc_includes` l1' /\ l2 `loc_includes` l2' ))
(ensures (loc_disjoint l1' l2' ))
[SMTPat (loc_disjoint l1' l2'); SMTPat (loc_unused_in h `loc_includes` l1); SMTPat (loc_not_unused_in h `loc_includes` l2)]
= loc_includes_trans (loc_unused_in h) l1 l1' ;
loc_includes_trans (loc_not_unused_in h) l2 l2' ;
loc_unused_in_not_unused_in_disjoint h ;
loc_disjoint_includes (loc_unused_in h) (loc_not_unused_in h) l1' l2'
val modifies_loc_unused_in
(l: loc)
(h1 h2: HS.mem)
(l' : loc)
: Lemma
(requires (
modifies l h1 h2 /\
address_liveness_insensitive_locs `loc_includes` l /\
loc_unused_in h2 `loc_includes` l'
))
(ensures (loc_unused_in h1 `loc_includes` l'))
[SMTPatOr [
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h2 `loc_includes` l')];
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h1 `loc_includes` l')];
]]
/// Shorthand: freshness
let fresh_loc (l: loc) (h h' : HS.mem) : GTot Type0 =
loc_unused_in h `loc_includes` l /\
loc_not_unused_in h' `loc_includes` l
let ralloc_post_fresh_loc (#a:Type) (#rel:Preorder.preorder a) (i: HS.rid) (init:a) (m0: HS.mem)
(x: HST.mreference a rel{HST.is_eternal_region (HS.frameOf x)}) (m1: HS.mem) : Lemma
(requires (HST.ralloc_post i init m0 x m1))
(ensures (fresh_loc (loc_freed_mreference x) m0 m1))
[SMTPat (HST.ralloc_post i init m0 x m1)]
= ()
//AR: this is needed for liveness across fresh_frame
val fresh_frame_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (modifies loc_none h0 h1))
[SMTPat (HS.fresh_frame h0 h1)]
val popped_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.popped h0 h1))
(ensures (modifies (loc_region_only false (HS.get_tip h0)) h0 h1))
[SMTPat (HS.popped h0 h1)]
val modifies_remove_new_locs (l_fresh l_aux l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (fresh_loc l_fresh h1 h2 /\
modifies l_aux h1 h2 /\
l_goal `loc_includes` l_aux /\
modifies (loc_union l_fresh l_goal) h2 h3))
(ensures (modifies l_goal h1 h3))
[SMTPat (fresh_loc l_fresh h1 h2);
SMTPat (modifies l_aux h1 h2);
SMTPat (modifies l_goal h1 h3)]
(*
* AR: this lemma is framing the modifies clause across a fresh frame
* one way to do it would have been to reuse the lemma modifies_remove_new_locs,
* treating the fresh frame as another new location
 * however, the way the library is set up, loc_region in any form cannot be considered
* a fresh loc
* so, we have a special lemma for fresh_frame
*)
let modifies_remove_fresh_frame (h1 h2 h3:HS.mem) (l:loc)
: Lemma (requires (HS.fresh_frame h1 h2 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h2)) l) h2 h3))
(ensures (modifies l h1 h3))
[SMTPat (modifies l h1 h3); SMTPat (HS.fresh_frame h1 h2)]
= loc_regions_unused_in h1 (HS.mod_set (Set.singleton (HS.get_tip h2)));
modifies_only_not_unused_in l h1 h3
/// Legacy shorthands for disjointness and inclusion of buffers
///
let disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_disjoint (loc_buffer b1) (loc_buffer b2)
let includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_includes (loc_buffer b1) (loc_buffer b2) /\
(g_is_null b1 <==> g_is_null b2)
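(*
 * Example (illustrative sketch only, not part of this interface): a typical
 * client-facing specification combines liveness, disjointness and a modifies
 * clause. A function overwriting `dst` from `src` could be specified roughly
 * as follows, where `triv` is an assumed trivial preorder `fun _ _ -> True`:
 *
 * val fill_from (dst src:mbuffer U32.t triv triv)
 * : HST.Stack unit
 * (requires fun h -> live h dst /\ live h src /\ disjoint dst src /\ length dst == length src)
 * (ensures fun h _ h' -> modifies (loc_buffer dst) h h' /\ as_seq h' dst == as_seq h src)
 *
 * Any location disjoint from `loc_buffer dst`, in particular `src` itself, is
 * preserved between `h` and `h'`.
 *)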
val disjoint_neq (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (disjoint b1 b2 /\ U32.v (len b1) > 0))
(ensures (~(b1 === b2)))
val empty_disjoint (#t1 #t2: Type) (#rrel1 #rel1: srel t1) (#rrel2 #rel2: srel t2) (b1: mbuffer t1 rrel1 rel1) (b2: mbuffer t2 rrel2 rel2) : Lemma
(requires (length b1 == 0))
(ensures (disjoint b1 b2))
(*
/// The liveness of a sub-buffer follows from the liveness
/// of its enclosing buffer.
val includes_live (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (live h larger ==> live h smaller))
[SMTPatOr [
[SMTPat (includes larger smaller); SMTPat (live h larger)];
[SMTPat (includes larger smaller); SMTPat (live h smaller)];
]]
*)
val includes_frameOf_as_addr (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (g_is_null larger == g_is_null smaller /\ frameOf larger == frameOf smaller /\ as_addr larger == as_addr smaller))
[SMTPat (larger `includes` smaller)]
///
/// Useful shorthands for pointers, or maybe-null pointers
inline_for_extraction
type mpointer (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{length b == 1}
inline_for_extraction
type mpointer_or_null (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{if g_is_null b then True else length b == 1}
unfold
let deref (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (x:mpointer a rrel rel) =
get h x 0
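(*
 * Example (hypothetical sketch, not part of this interface): a C out-parameter
 * such as `uint32_t *out` is modelled as a pointer, i.e. a buffer of length 1,
 * and its final value is described with `deref`. `triv` is an assumed trivial
 * preorder `fun _ _ -> True`:
 *
 * val write_out (out:mpointer U32.t triv triv) (v:U32.t)
 * : HST.Stack unit
 * (requires fun h -> live h out)
 * (ensures fun h _ h' -> modifies (loc_buffer out) h h' /\ deref h' out == v)
 *)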
/// Two pointers having different contents are disjoint. This is valid
/// only for pointers, i.e. buffers of size 1.
val pointer_distinct_sel_disjoint
(#a:Type0) (#rrel1 #rrel2 #rel1 #rel2:srel a)
(b1:mpointer a rrel1 rel1)
(b2:mpointer a rrel2 rel2)
(h:HS.mem)
:Lemma (requires (live h b1 /\ live h b2 /\ get h b1 0 =!= get h b2 0))
(ensures (disjoint b1 b2))
/// The following stateful operations on buffers do not change the
/// memory, but, as required by the C standard, they all require the
/// buffer in question to be live.
/// The nullity test ``is_null b`` is compiled by KaRaMeL to C as ``b == NULL``.
val is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack bool (requires (fun h -> live h b))
(ensures (fun h y h' -> h == h' /\ y == g_is_null b))
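(*
 * Example (illustrative sketch only): the run-time test agrees with the ghost
 * `g_is_null`, so its result can be reflected directly in the postcondition.
 * `triv` is an assumed trivial preorder `fun _ _ -> True`:
 *
 * let is_set (b:mbuffer U32.t triv triv)
 * : HST.Stack bool
 * (requires fun h -> live h b)
 * (ensures fun h r h' -> h == h' /\ r == not (g_is_null b))
 * = not (is_null b)
 *)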
/// ``sub b i len`` constructs the sub-buffer of ``b`` starting from
/// offset ``i`` with length ``len``. KaRaMeL extracts this operation as
/// ``b + i`` (or, equivalently, ``&b[i]``).
val msub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t) (len:Ghost.erased U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i + U32.v (Ghost.reveal len) <= length b /\ compatible_sub b i (Ghost.reveal len) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (Ghost.reveal len)))
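(*
 * Example (illustrative sketch only, not part of this interface): carving the
 * second half out of an 8-cell buffer. With the same trivial preorder `triv`
 * (an assumption of this sketch) on both sides, `compatible_sub` holds
 * trivially:
 *
 * let second_half (b:mbuffer U32.t triv triv { length b == 8 })
 * : HST.Stack (mbuffer U32.t triv triv)
 * (requires fun h -> live h b)
 * (ensures fun h b' h' -> h == h' /\ b' == mgsub triv b 4ul 4ul)
 * = msub triv b 4ul (G.hide 4ul)
 *)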
/// ``offset b i`` constructs the tail of the buffer ``b`` starting from
/// offset ``i``, i.e. the sub-buffer of ``b`` starting from offset ``i``
/// with length ``U32.sub (len b) i``. KaRaMeL compiles it as ``b + i`` or
/// ``&b[i]``.
///
/// This stateful operation cannot be derived from ``sub``, because the
/// length cannot be computed outside of proofs.
val moffset (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i <= length b /\ compatible_sub b i (U32.sub (len b) i) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (U32.sub (len b) i)))
// goffset
/// ``index b i`` reads the value of ``b`` at offset ``i`` from memory and
/// returns it. KaRaMeL compiles it as b[i].
val index (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (i:U32.t)
:HST.Stack a (requires (fun h -> live h b /\ U32.v i < length b))
(ensures (fun h y h' -> h == h' /\ y == Seq.index (as_seq h b) (U32.v i)))
/// The following stateful operations on buffers modify the memory,
/// and, as usual, require the liveness of the buffer.
/// ``g_upd_seq b s h`` updates the entire buffer `b`'s contents in
/// heap `h` to correspond to the sequence `s`
val g_upd_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (s:Seq.lseq a (length b))
(h:HS.mem{live h b})
:GTot HS.mem
val lemma_g_upd_with_same_seq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b)) (ensures (g_upd_seq b (as_seq h b) h == h))
/// A lemma specifying `g_upd_seq` in terms of its effect on the
/// buffer's underlying sequence
val g_upd_seq_as_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(s:Seq.lseq a (length b))
(h:HS.mem{live h b})
: Lemma (let h' = g_upd_seq b s h in
(Seq.length s > 0 ==> not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
HST.equal_domains h h' /\
as_seq h' b == s)
/// ``g_upd b i v h`` updates the buffer `b` in heap `h` at location
/// `i` writing ``v`` there. This is the spec analog of the stateful
/// update `upd` below.
let g_upd (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: GTot HS.mem
= g_upd_seq b (Seq.upd (as_seq h b) i v) h
val g_upd_modifies_strong (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: Lemma (modifies (loc_buffer_from_to b (U32.uint_to_t i) (U32.uint_to_t (i + 1))) h (g_upd b i v h))
/// ``upd b i v`` writes ``v`` to the memory, at offset ``i`` of
/// buffer ``b``. KaRaMeL compiles it as ``b[i] = v``.
val upd'
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
:HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> h' == g_upd b (U32.v i) v h))
inline_for_extraction
let upd
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
: HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> (not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
as_seq h' b == Seq.upd (as_seq h b) (U32.v i) v))
= let h = HST.get () in
upd' b i v;
g_upd_seq_as_seq b (Seq.upd (as_seq h b) (U32.v i) v) h
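(*
 * Example (illustrative sketch only, not part of this interface): reading and
 * writing a cell with `index` and `upd`. With the trivial preorder `triv`
 * assumed below, the `rel` precondition of `upd` is vacuous:
 *
 * let incr_first (b:mbuffer U32.t triv triv)
 * : HST.Stack unit
 * (requires fun h -> live h b /\ length b > 0)
 * (ensures fun h _ h' -> modifies (loc_buffer b) h h' /\ live h' b)
 * = let x = index b 0ul in
 *   upd b 0ul (x `U32.add_mod` 1ul)
 *)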
(* FIXME: Comment on `recall` *)
val recallable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val region_lifetime_buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Type0
(*
* A functoriality lemma
*)
unfold
let rrel_rel_always_compatible (#a:Type0) (rrel rel:srel a) =
forall (len:nat) (i:nat) (j:nat{i <= j /\ j <= len}). compatible_subseq_preorder len rrel i j rel
val region_lifetime_sub (#a:Type0) (#rrel #rel #subrel:srel a)
(b0:mbuffer a rrel rel)
(b1:mbuffer a rrel subrel)
: Lemma
(requires rrel_rel_always_compatible rrel subrel)
(ensures
(region_lifetime_buf b0 /\
(exists i len. U32.v i + U32.v len <= length b0 /\ b1 == mgsub subrel b0 i len)) ==> region_lifetime_buf b1)
val recallable_null (#a:Type0) (#rrel #rel:srel a)
:Lemma (recallable (mnull #a #rrel #rel)) [SMTPat (recallable (mnull #a #rrel #rel))]
(*
val recallable_includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (recallable larger <==> recallable smaller))
[SMTPatOr [
[SMTPat (recallable larger); SMTPat (recallable smaller);];
[SMTPat (larger `includes` smaller)];
]]
*)
val recallable_mgsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel /\ recallable b))
(ensures (recallable (mgsub sub_rel b i len)))
[SMTPatOr [
[SMTPat (recallable (mgsub sub_rel b i len))];
[SMTPat (recallable b); SMTPat (mgsub sub_rel b i len);]
]]
val recall (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack unit (requires (fun m -> recallable b \/ (region_lifetime_buf b /\ HS.live_region m (frameOf b))))
(ensures (fun m0 _ m1 -> m0 == m1 /\ live m1 b))
(*
* Begin: API for general witness and recall
* Clients can witness predicates on the contents of the buffer, and later recall them
* Provided the predicates are stable w.r.t. the buffer preorder
*)
(* Shorthand for predicates of Seq.seq a *)
unfold let spred (a:Type0) = Seq.seq a -> Type0
(*
 * Note the tight patterns on the quantifier; you may need to write additional triggers
 * if you are working with them directly.
*)
unfold let stable_on (#a:Type0) (p:spred a) (rel:srel a) =
forall (s1 s2:Seq.seq a).{:pattern (p s1); (rel s1 s2); (p s2)} (p s1 /\ rel s1 s2) ==> p s2
(* Clients get this pure token when they witness a predicate *)
val witnessed (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a) :Type0
(*
* We can only support witness and recall for gc-malloced buffers (i.e. recallable ones)
* This is not a fundamental limitation, but needs some tweaks to the underlying state model
*)
val witness_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.ST unit (requires (fun h0 -> p (as_seq h0 b) /\ p `stable_on` rel))
(ensures (fun h0 _ h1 -> h0 == h1 /\ b `witnessed` p))
val recall_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.ST unit (requires (fun h0 -> (recallable b \/ live h0 b) /\ b `witnessed` p))
(ensures (fun h0 _ h1 -> h0 == h1 /\ live h0 b /\ p (as_seq h0 b)))
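(*
 * Example (hypothetical sketch, all names below are local assumptions): with a
 * preorder that only lets cells grow, the predicate "cell 0 is at least 1" is
 * stable, so it can be witnessed once and recalled later without re-proving it.
 *
 * let grows : srel U32.t = fun s1 s2 ->
 *   Seq.length s1 == Seq.length s2 /\
 *   (forall (i:nat). i < Seq.length s1 ==> U32.v (Seq.index s1 i) <= U32.v (Seq.index s2 i))
 *
 * let at_least_one : spred U32.t = fun s -> Seq.length s > 0 /\ U32.v (Seq.index s 0) >= 1
 *
 * Once `at_least_one (as_seq h b)` holds for some `b:mbuffer U32.t grows grows`,
 * `witness_p b at_least_one` records it (the `stable_on` side condition is easy),
 * and a later `recall_p b at_least_one` restores it in any future heap.
 *)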
val witnessed_functorial (#a:Type0)
(#rrel #rel1 #rel2:srel a)
(b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t)
(s1 s2:spred a)
: Lemma
(requires
rrel_rel_always_compatible rrel rel1 /\ //e.g. trivial_preorder, immutable preorder etc.
U32.v i + U32.v len <= length b1 /\
b2 == mgsub rel2 b1 i len /\ //the underlying allocation unit for b1 and b2 must be the same
witnessed b1 s1 /\
(forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2)))
(ensures witnessed b2 s2)
(*
* A stateful version that relaxes the rrel and rel compatibility
* but requires liveness of b1
*)
val witnessed_functorial_st (#a:Type0)
(#rrel #rel1 #rel2:srel a)
(b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t)
(s1 s2:spred a)
: HST.Stack unit
(requires fun h ->
live h b1 /\
U32.v i + U32.v len <= length b1 /\
b2 == mgsub rel2 b1 i len /\
witnessed b1 s1 /\
(forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2)))
(ensures fun h0 _ h1 -> h0 == h1 /\ witnessed b2 s2)
(* End: API for general witness and recall *)
/// Deallocation. A buffer that was allocated by ``malloc`` (see below)
/// can be ``free`` d.
val freeable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val free (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.ST unit (requires (fun h0 -> live h0 b /\ freeable b))
(ensures (fun h0 _ h1 -> (not (g_is_null b)) /\
Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\
(HS.get_tip h1) == (HS.get_tip h0) /\
modifies (loc_addr_of_buffer b) h0 h1 /\
HS.live_region h1 (frameOf b)))
val freeable_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (freeable b)) (ensures (length b > 0))
[SMTPat (freeable b)]
val freeable_disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (frameOf b1 <> frameOf b2 \/ as_addr b1 <> as_addr b2))
let freeable_disjoint' (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (loc_disjoint (loc_addr_of_buffer b1) (loc_addr_of_buffer b2)))
[SMTPat (freeable b1); SMTPat (disjoint b1 b2)]
= freeable_disjoint b1 b2
(***** Begin allocation functions *****)
/// Allocation. This is the common postcondition of all allocation
/// operators, which tells that the resulting buffer is fresh, and
/// specifies its initial contents.
(*
* Allocation functions:
* In the return type, we try to give heap-independent postconditions (such as length)
* in the refinement of the buffer type (for the usage pattern of top-level buffers)
* while heap dependent postconditions are provided in the ensures clause
*
* One unsatisfying aspect is that these functions are duplicated in the wrappers that we write
* (e.g. Buffer, ImmutableBuffer, etc.)
* If we don't duplicate, then the clients may face type inference issues (for preorders)
*
* So, if you change any of the pre- or postcondition, you should change the pre and post spec functions
* (such as alloc_post_mem_common etc.), rather than the specs directly
* Perhaps we can rely on F* type inference and not write specs explicitly in those wrappers?
* Will try that
*
* For memory dependent post, alloc_post_mem_common is the one used by everyone
*
* For heap allocations, the library also provides partial functions that could return null
* Clients need to explicitly check for non-null values when using these functions
* Partial function specs use alloc_partial_post_mem_common
*
* NOTE: a useful test for the implementation of partial functions is that
* their spec should be valid even when their implementation just returns null
*)
unfold let lmbuffer (a:Type0) (rrel rel:srel a) (len:nat)
= b:mbuffer a rrel rel{length b == len /\ not (g_is_null b)}
unfold
let alloc_post_mem_common (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h0 h1:HS.mem) (s:Seq.seq a)
= live h1 b /\
unused_in b h0 /\
Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\
(HS.get_tip h1) == (HS.get_tip h0) /\
modifies loc_none h0 h1 /\
as_seq h1 b == s
(* Return type and post for partial allocation functions *)
unfold let lmbuffer_or_null (a:Type0) (rrel rel:srel a) (len:nat) (r:HS.rid)
= b:mbuffer a rrel rel{(not (g_is_null b)) ==> (length b == len /\ frameOf b == r)}
unfold let alloc_partial_post_mem_common (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h0 h1:HS.mem) (s:Seq.seq a)
= (g_is_null b /\ h0 == h1) \/
((not (g_is_null b)) /\ alloc_post_mem_common b h0 h1 s)
unfold let malloc_pre (r:HS.rid) (len:U32.t) = HST.is_eternal_region r /\ U32.v len > 0
/// ``gcmalloc r init len`` allocates a memory-managed buffer of some
/// positive length ``len`` in an eternal region ``r``. Every cell of this
/// buffer will have initial contents ``init``. Such a buffer cannot be
/// freed. In fact, it is eternal: it cannot be deallocated at all.
(*
* See the Allocation comment above when changing the spec
*)
val mgcmalloc (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ recallable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
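(*
 * Example (illustrative sketch only): GC-allocated buffers are typically used
 * for long-lived, top-level state; being recallable, their liveness can be
 * re-established with `recall` rather than threaded through every
 * specification. `triv` is an assumed trivial preorder `fun _ _ -> True`:
 *
 * let read_state (st:mbuffer U32.t triv triv { recallable st /\ length st == 1 })
 * : HST.ST U32.t (requires fun _ -> True) (ensures fun _ _ _ -> True)
 * = recall st;
 *   index st 0ul
 *)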
(*
* Allocate a memory-managed buffer initialized with contents from src
*
* This allocates and initializes the buffer atomically (from the perspective of the Low* clients)
*)
val mgcmalloc_and_blit (#a:Type0) (#rrel:srel a) (r:HS.rid)
(#rrel1 #rel1:srel a) (src:mbuffer a rrel1 rel1) (id_src:U32.t) (len:U32.t)
: HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ recallable b})
(requires fun h0 ->
malloc_pre r len /\
live h0 src /\ U32.v id_src + U32.v len <= length src)
(ensures fun h0 b h1 ->
alloc_post_mem_common b h0 h1
(Seq.slice (as_seq h0 src) (U32.v id_src) (U32.v id_src + U32.v len)))
(*
* See the Allocation comment above when changing the spec
*)
inline_for_extraction
let mgcmalloc_partial (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer_or_null a rrel rrel (U32.v len) r{recallable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
= mgcmalloc r init len
/// ``malloc r init len`` allocates a hand-managed buffer of some
/// positive length ``len`` in an eternal region ``r``. Every cell of this
/// buffer will have initial contents ``init``. Such a buffer can be
/// freed using ``free`` above. Note that the ``freeable`` permission is
/// only on the whole buffer ``b``, and is not inherited by any of its
/// strict sub-buffers.
(*
* See the Allocation comment above when changing the spec
*)
val mmalloc (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
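(*
 * Example (illustrative sketch only, not part of this interface): a
 * hand-managed buffer is allocated in an eternal region such as `HS.root`,
 * used, and then released with `free`. `triv` is an assumed trivial preorder:
 *
 * let alloc_use_free () : HST.ST unit (requires fun _ -> True) (ensures fun _ _ _ -> True)
 * = let b = mmalloc #U32.t #triv HS.root 0ul 4ul in
 *   upd b 0ul 1ul;
 *   free b
 *)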
(*
* Allocate a hand-managed buffer initialized with contents from src
*
* This allocates and initializes the buffer atomically (from the perspective of the Low* clients)
*)
val mmalloc_and_blit (#a:Type0) (#rrel:srel a) (r:HS.rid)
(#rrel1 #rel1:srel a) (src:mbuffer a rrel1 rel1) (id_src:U32.t) (len:U32.t)
: HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ freeable b})
(requires fun h0 ->
malloc_pre r len /\
live h0 src /\ U32.v id_src + U32.v len <= length src)
(ensures fun h0 b h1 ->
alloc_post_mem_common b h0 h1
(Seq.slice (as_seq h0 src) (U32.v id_src) (U32.v id_src + U32.v len)))
(*
* See the Allocation comment above when changing the spec
*)
inline_for_extraction
let mmalloc_partial (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer_or_null a rrel rrel (U32.v len) r{(not (g_is_null b)) ==> freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
= mmalloc r init len
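(*
 * Example (illustrative sketch only): the partial allocators may return null,
 * so callers test the result before using it, mirroring the C idiom of
 * checking the result of malloc. `triv` is an assumed trivial preorder:
 *
 * let try_alloc () : HST.ST unit (requires fun _ -> True) (ensures fun _ _ _ -> True)
 * = let b = mmalloc_partial #U32.t #triv HS.root 0ul 4ul in
 *   if is_null b
 *   then ()  // allocation failed, nothing to do
 *   else begin upd b 0ul 1ul; free b end
 *)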
/// ``alloca init len`` allocates a buffer of some positive length ``len``
/// in the current stack frame. Every cell of this buffer will have
/// initial contents ``init``. Such a buffer cannot be freed
/// individually, but is automatically freed as soon as its stack
/// frame is deallocated by ``HST.pop_frame``.
unfold let alloca_pre (len:U32.t) = U32.v len > 0
(*
* See the Allocation comment above when changing the spec
*)
val malloca (#a:Type0) (#rrel:srel a)
(init:a) (len:U32.t)
:HST.StackInline (lmbuffer a rrel rrel (U32.v len))
(requires (fun _ -> alloca_pre len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init) /\
frameOf b == HS.get_tip h0))
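(*
 * Example (illustrative sketch only, not part of this interface): a stack
 * allocation is only valid inside an enclosing push_frame/pop_frame pair and
 * is reclaimed when the frame is popped. `triv` is an assumed trivial preorder:
 *
 * let with_temp () : HST.Stack U32.t (requires fun _ -> True) (ensures fun _ _ _ -> True)
 * = HST.push_frame ();
 *   let tmp = malloca #U32.t #triv 0ul 8ul in
 *   upd tmp 0ul 42ul;
 *   let r = index tmp 0ul in
 *   HST.pop_frame ();
 *   r
 *)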
(*
* Allocate a stack buffer initialized with contents from src
*
* This allocates and initializes the buffer atomically (from the perspective of the Low* clients)
*)
val malloca_and_blit (#a:Type0) (#rrel:srel a)
(#rrel1 #rel1:srel a) (src:mbuffer a rrel1 rel1) (id_src:U32.t) (len:U32.t)
: HST.StackInline (lmbuffer a rrel rrel (U32.v len))
(requires fun h0 ->
alloca_pre len /\
live h0 src /\ U32.v id_src + U32.v len <= length src)
(ensures fun h0 b h1 ->
alloc_post_mem_common b h0 h1
(Seq.slice (as_seq h0 src) (U32.v id_src) (U32.v id_src + U32.v len)) /\
frameOf b == HS.get_tip h0)
/// ``alloca_of_list init`` allocates a buffer in the current stack
/// frame. The initial values of the cells of this buffer are
/// specified by the ``init`` list, which must be nonempty, and of
/// length representable as a machine integer.
unfold let alloca_of_list_pre (#a:Type0) (init:list a) =
normalize (0 < FStar.List.Tot.length init) /\
normalize (FStar.List.Tot.length init <= UInt.max_int 32)
(*
* See the Allocation comment above when changing the spec
*)
val malloca_of_list (#a:Type0) (#rrel:srel a) (init: list a)
:HST.StackInline (lmbuffer a rrel rrel (normalize_term (List.Tot.length init)))
(requires (fun _ -> alloca_of_list_pre init))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.seq_of_list init) /\
frameOf b == HS.get_tip h0))
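(*
 * Example (illustrative sketch only): `malloca_of_list` is convenient for
 * small constant tables; the list must be non-empty and its length a
 * compile-time constant. With an assumed trivial preorder `triv`:
 *
 * HST.push_frame ();
 * let tbl = malloca_of_list #U32.t #triv [1ul; 2ul; 3ul] in
 * // tbl has length 3 and contents Seq.seq_of_list [1ul; 2ul; 3ul]
 * HST.pop_frame ()
 *)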
unfold let gcmalloc_of_list_pre (#a:Type0) (r:HS.rid) (init:list a) =
HST.is_eternal_region r /\
normalize (FStar.List.Tot.length init <= UInt.max_int 32)
(*
* See the Allocation comment above when changing the spec
*)
val mgcmalloc_of_list (#a:Type0) (#rrel:srel a) (r:HS.rid) (init:list a)
:HST.ST (b:lmbuffer a rrel rrel (normalize_term (List.Tot.length init)){frameOf b == r /\ recallable b})
(requires (fun _ -> gcmalloc_of_list_pre r init))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.seq_of_list init)))
(*
* See the Allocation comment above when changing the spec
*) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | r: FStar.Monotonic.HyperHeap.rid -> init: Prims.list a
-> FStar.HyperStack.ST.ST
(b:
LowStar.Monotonic.Buffer.lmbuffer_or_null a
rrel
rrel
(FStar.Pervasives.normalize_term (FStar.List.Tot.Base.length init))
r {LowStar.Monotonic.Buffer.recallable b}) | FStar.HyperStack.ST.ST | [] | [] | [
"LowStar.Monotonic.Buffer.srel",
"FStar.Monotonic.HyperHeap.rid",
"Prims.list",
"LowStar.Monotonic.Buffer.mgcmalloc_of_list",
"LowStar.Monotonic.Buffer.lmbuffer",
"FStar.Pervasives.normalize_term",
"Prims.nat",
"FStar.List.Tot.Base.length",
"Prims.l_and",
"Prims.eq2",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.recallable",
"LowStar.Monotonic.Buffer.lmbuffer_or_null",
"FStar.Monotonic.HyperStack.mem",
"LowStar.Monotonic.Buffer.gcmalloc_of_list_pre",
"LowStar.Monotonic.Buffer.alloc_partial_post_mem_common",
"FStar.Seq.Base.seq_of_list"
] | [] | false | true | false | false | false | let mgcmalloc_of_list_partial (#a: Type0) (#rrel: srel a) (r: HS.rid) (init: list a)
: HST.ST
(b: lmbuffer_or_null a rrel rrel (normalize_term (List.Tot.length init)) r {recallable b})
(requires (fun _ -> gcmalloc_of_list_pre r init))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.seq_of_list init))) =
| mgcmalloc_of_list r init | false |
LowStar.Monotonic.Buffer.fsti | LowStar.Monotonic.Buffer.mmalloc_partial | val mmalloc_partial (#a: Type0) (#rrel: srel a) (r: HS.rid) (init: a) (len: U32.t)
: HST.ST (b: lmbuffer_or_null a rrel rrel (U32.v len) r {(not (g_is_null b)) ==> freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) init))) | val mmalloc_partial (#a: Type0) (#rrel: srel a) (r: HS.rid) (init: a) (len: U32.t)
: HST.ST (b: lmbuffer_or_null a rrel rrel (U32.v len) r {(not (g_is_null b)) ==> freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) init))) | let mmalloc_partial (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer_or_null a rrel rrel (U32.v len) r{(not (g_is_null b)) ==> freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
= mmalloc r init len | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 22,
"end_line": 2195,
"start_col": 0,
"start_line": 2190
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` return the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further the clients can attach a preorder with the subbuffer (sub_rel),
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers having the same region and address have
/// their elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
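(*
* Illustrative sketch (hypothetical names, not part of this verified
* interface): the constructors above are typically combined with
* ``loc_union`` into a single abstract footprint. For instance, a
* container keeping its elements in a buffer ``b`` and auxiliary state in
* a region ``r`` could expose:
*
* let container_footprint (#a:Type0) (#rrel #rel:srel a)
*   (b:mbuffer a rrel rel) (r:HS.rid) : GTot loc
* = loc_union (loc_buffer b) (loc_all_regions_from false r)
*
* Clients then reason about inclusion in, and disjointness from, this one
* ``loc`` instead of each constituent location separately.
*)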
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then the set of memory
/// locations of ``b1`` includes that of ``b2``.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then so are their corresponding sets of memory
/// locations.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then so are their corresponding memory locations
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
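(*
* Illustrative sketch (hypothetical lemma): ``loc_disjoint_gsub_buffer``
* justifies the common pattern of splitting a buffer into non-overlapping
* pieces that can then be updated independently. For example, the two
* halves of ``b`` delimited by an index ``i`` are disjoint, and the proof
* is expected to be discharged automatically by the SMT pattern above:
*
* let split_halves (#a:Type0) (#rrel #rel:srel a)
*   (b:mbuffer a rrel rel) (i:U32.t{U32.v i <= length b})
*   : Lemma (loc_disjoint (loc_buffer (mgsub rel b 0ul i))
*                         (loc_buffer (mgsub rel b i (len b `U32.sub` i))))
* = ()
*)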
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
* Same as all_disjoint, retaining for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
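(*
* Illustrative note: these list-based utilities keep specifications
* readable when several buffers are involved. A precondition such as
*
*   all_live h [buf b1; buf b2; buf b3] /\
*   all_disjoint [loc_buffer b1; loc_buffer b2; loc_buffer b3]
*
* (with ``b1``, ``b2``, ``b3`` hypothetical buffers) unfolds at
* typechecking time into the three liveness facts and the three pairwise
* ``loc_disjoint`` facts, instead of a quadratic number of handwritten
* conjuncts.
*)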
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if,
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
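(*
* Illustrative note: this elimination lemma is the workhorse of framing.
* If a caller knows ``modifies (loc_buffer b1) h0 h1`` together with
* ``loc_disjoint (loc_buffer b1) (loc_buffer b2)`` (for hypothetical
* buffers ``b1`` and ``b2``), the SMT patterns above let it conclude
*
*   live h1 b2 /\ as_seq h1 b2 == as_seq h0 b2
*
* without any explicit lemma call: the liveness and contents of ``b2``
* are carried across the update to ``b1``.
*)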
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
/// If the memory state does not change, then any set of memory locations
/// is trivially modified (in particular, the empty set ``loc_none``).
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x
/// Modifies clauses are transitive. This lemma is the most general
/// one.
val modifies_trans
(s12: loc)
(h1 h2: HS.mem)
(s23: loc)
(h3: HS.mem)
: Lemma
(requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
(ensures (modifies (loc_union s12 s23) h1 h3))
let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
= modifies_trans l h1 h2 l_goal h3
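(*
* Illustrative note: ``modifies_trans_linear`` matches the usual shape of
* verification conditions for sequenced calls, where the footprint of the
* whole sequence is the footprint of the larger, second step. For
* hypothetical buffers ``b`` and ``b'``, from
*
*   modifies (loc_buffer b) h0 h1  and
*   modifies (loc_union (loc_buffer b) (loc_buffer b')) h1 h2
*
* the goal ``modifies (loc_union (loc_buffer b) (loc_buffer b')) h0 h2``
* follows automatically, since the second footprint includes the first.
*)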
/// Regions that are not live can be removed from sets of memory
/// locations that are modified.
val modifies_only_live_regions
(rs: Set.set HS.rid)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_regions false rs) l) h h' /\
(forall r . Set.mem r rs ==> (~ (HS.live_region h r)))
))
(ensures (modifies l h h'))
/// As a consequence, fresh regions can be removed from modifies
/// clauses.
val no_upd_fresh_region: r:HS.rid -> l:loc -> h0:HS.mem -> h1:HS.mem -> Lemma
(requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1))
(ensures (modifies l h0 h1))
[SMTPat (HS.fresh_region r h0 h1); SMTPat (modifies l h0 h1)]
val new_region_modifies (m0: HS.mem) (r0: HS.rid) (col: option int) : Lemma
(requires (HST.is_eternal_region r0 /\ HS.live_region m0 r0 /\ (None? col \/ HS.is_heap_color (Some?.v col))))
(ensures (
let (_, m1) = HS.new_eternal_region m0 r0 col in
modifies loc_none m0 m1
))
[SMTPat (HS.new_eternal_region m0 r0 col)]
/// Stack discipline: any stack frame (and all its transitively
/// extending regions) that is pushed, modified and popped can be
/// removed from a modifies clause.
/// AR: 01/29/2019: Removing the smt pattern from this lemma.
/// Clients are no longer expected to call it explicitly;
/// if you find yourself having to, please raise an issue.
val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
))
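(*
* Illustrative sketch (hypothetical function): together with
* ``fresh_frame_modifies`` and ``popped_modifies`` below, this is what
* lets a function that only touches its own stack frame advertise an
* empty footprint, along the lines of:
*
* let scratch () : HST.Stack unit
*   (requires fun _ -> True)
*   (ensures  fun h0 _ h1 -> modifies loc_none h0 h1)
* = HST.push_frame ();
*   // allocate and use stack buffers local to this frame here
*   HST.pop_frame ()
*)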
/// Compatibility lemmas to rescue modifies clauses specified in the
/// standard F* HyperStack library.
val modifies_loc_regions_intro
(rs: Set.set HS.rid)
(h1 h2: HS.mem)
: Lemma
(requires (HS.modifies rs h1 h2))
(ensures (modifies (loc_regions true rs) h1 h2))
val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2))
/// Modifies clauses for allocating a reference: nothing is
/// modified. (In particular, a modifies clause does not track
/// memory locations that are created.)
val modifies_ralloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(i: HS.rid)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel)
(h' : HS.mem)
: Lemma
(requires (HST.ralloc_post i init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.ralloc_post i init h x h')]
val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')]
/// Modifies clause for freeing a reference: the address is modified.
val modifies_free
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel { HS.is_mm r } )
(m: HS.mem { m `HS.contains` r } )
: Lemma
(modifies (loc_freed_mreference r) m (HS.free r m))
[SMTPat (HS.free r m)]
/// Another compatibility lemma
val modifies_none_modifies
(h1 h2: HS.mem)
: Lemma
(requires (HST.modifies_none h1 h2))
(ensures (modifies loc_none h1 h2))
[SMTPat (HST.modifies_none h1 h2)]
/// Compatibility with HS.upd
val modifies_upd
(#t: Type) (#pre: Preorder.preorder t)
(r: HS.mreference t pre)
(v: t)
(h: HS.mem)
: Lemma
(requires (HS.contains h r))
(ensures (modifies (loc_mreference r) h (HS.upd h r v)))
[SMTPat (HS.upd h r v)]
/// Introduction lemma for modifying loc_buffer_from_to
val modifies_loc_buffer_from_to_intro
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
/// A memory ``h`` does not contain address ``a`` in region ``r``, denoted
/// ``does_not_contain_addr h (r, a)``, only if either region ``r`` is
/// not live or address ``a`` is unused in region ``r``.
(* BEGIN TODO: move to FStar.Monotonic.HyperStack *)
val does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: GTot Type0
val not_live_region_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (~ (HS.live_region h (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val unused_in_does_not_contain_addr
(h: HS.mem)
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r)))
val addr_unused_in_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val free_does_not_contain_addr
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
HS.is_mm r /\
m `HS.contains` r /\
fst x == HS.frameOf r /\
snd x == HS.as_addr r
))
(ensures (
HS.free r m `does_not_contain_addr` x
))
[SMTPat (HS.free r m `does_not_contain_addr` x)]
val does_not_contain_addr_elim
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
m `does_not_contain_addr` x /\
HS.frameOf r == fst x /\
HS.as_addr r == snd x
))
(ensures (~ (m `HS.contains` r)))
(** END TODO *)
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_live_addresses
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_addresses false r a) l) h h' /\
(forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x))
))
(ensures (modifies l h h'))
(* Generic way to ensure that a buffer just allocated is disjoint from
any other object, however the latter's liveness is defined. *)
val loc_not_unused_in (h: HS.mem) : GTot loc
val loc_unused_in (h: HS.mem) : GTot loc
(* Shortcut notations with more handy names *)
let loc_in (l: loc) (h: HS.mem) =
loc_not_unused_in h `loc_includes` l
let loc_not_in (l: loc) (h: HS.mem) =
loc_unused_in h `loc_includes` l
val loc_regions_unused_in (h: HS.mem) (rs: Set.set HS.rid) : Lemma
(requires (forall r . Set.mem r rs ==> (~ (HS.live_region h r))))
(ensures (loc_unused_in h `loc_includes` loc_regions false rs))
val loc_unused_in_not_unused_in_disjoint (h: HS.mem) : Lemma
(loc_disjoint (loc_unused_in h) (loc_not_unused_in h))
val not_live_region_loc_not_unused_in_disjoint
(h0: HS.mem)
(r: HS.rid)
: Lemma
(requires (~ (HS.live_region h0 r)))
(ensures (loc_disjoint (loc_region_only false r) (loc_not_unused_in h0)))
let fresh_frame_loc_not_unused_in_disjoint
(h0 h1: HS.mem)
: Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)]
= not_live_region_loc_not_unused_in_disjoint h0 (HS.get_tip h1)
val live_loc_not_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b))
(ensures (loc_not_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (live h b)]
val unused_in_loc_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (unused_in b h))
(ensures (loc_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (unused_in b h)]
val modifies_address_liveness_insensitive_unused_in
(h h' : HS.mem)
: Lemma
(requires (modifies (address_liveness_insensitive_locs) h h'))
(ensures (loc_not_unused_in h' `loc_includes` loc_not_unused_in h /\ loc_unused_in h `loc_includes` loc_unused_in h'))
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_not_unused_in
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (modifies (loc_union (loc_unused_in h) l) h h'))
(ensures (modifies l h h'))
val mreference_live_loc_not_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (h `HS.contains` r))
(ensures (loc_not_unused_in h `loc_includes` loc_freed_mreference r /\ loc_not_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.contains h r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_freed_mreference r)];
]]
val mreference_unused_in_loc_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (loc_unused_in h `loc_includes` loc_freed_mreference r /\ loc_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.unused_in r h)];
[SMTPat (loc_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_unused_in h `loc_includes` loc_freed_mreference r)];
]]
let unused_in_not_unused_in_disjoint_2
(l1 l2 l1' l2': loc)
(h: HS.mem)
: Lemma
(requires (loc_unused_in h `loc_includes` l1 /\ loc_not_unused_in h `loc_includes` l2 /\ l1 `loc_includes` l1' /\ l2 `loc_includes` l2' ))
(ensures (loc_disjoint l1' l2' ))
[SMTPat (loc_disjoint l1' l2'); SMTPat (loc_unused_in h `loc_includes` l1); SMTPat (loc_not_unused_in h `loc_includes` l2)]
= loc_includes_trans (loc_unused_in h) l1 l1' ;
loc_includes_trans (loc_not_unused_in h) l2 l2' ;
loc_unused_in_not_unused_in_disjoint h ;
loc_disjoint_includes (loc_unused_in h) (loc_not_unused_in h) l1' l2'
val modifies_loc_unused_in
(l: loc)
(h1 h2: HS.mem)
(l' : loc)
: Lemma
(requires (
modifies l h1 h2 /\
address_liveness_insensitive_locs `loc_includes` l /\
loc_unused_in h2 `loc_includes` l'
))
(ensures (loc_unused_in h1 `loc_includes` l'))
[SMTPatOr [
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h2 `loc_includes` l')];
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h1 `loc_includes` l')];
]]
/// Shorthand: freshness
let fresh_loc (l: loc) (h h' : HS.mem) : GTot Type0 =
loc_unused_in h `loc_includes` l /\
loc_not_unused_in h' `loc_includes` l
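(*
* Illustrative note: allocation routines typically add
* ``fresh_loc (loc_addr_of_buffer b) h0 h1`` to their postcondition, e.g.
* (hypothetical spec fragment)
*
*   (ensures fun h0 b h1 ->
*      live h1 b /\
*      fresh_loc (loc_addr_of_buffer b) h0 h1 /\
*      modifies loc_none h0 h1)
*
* Combined with ``unused_in_not_unused_in_disjoint_2`` above, this lets a
* caller conclude that the new buffer is ``loc_disjoint`` from every
* location it already knew to be live in ``h0``.
*)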
let ralloc_post_fresh_loc (#a:Type) (#rel:Preorder.preorder a) (i: HS.rid) (init:a) (m0: HS.mem)
(x: HST.mreference a rel{HST.is_eternal_region (HS.frameOf x)}) (m1: HS.mem) : Lemma
(requires (HST.ralloc_post i init m0 x m1))
(ensures (fresh_loc (loc_freed_mreference x) m0 m1))
[SMTPat (HST.ralloc_post i init m0 x m1)]
= ()
//AR: this is needed for liveness across fresh_frame
val fresh_frame_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (modifies loc_none h0 h1))
[SMTPat (HS.fresh_frame h0 h1)]
val popped_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.popped h0 h1))
(ensures (modifies (loc_region_only false (HS.get_tip h0)) h0 h1))
[SMTPat (HS.popped h0 h1)]
val modifies_remove_new_locs (l_fresh l_aux l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (fresh_loc l_fresh h1 h2 /\
modifies l_aux h1 h2 /\
l_goal `loc_includes` l_aux /\
modifies (loc_union l_fresh l_goal) h2 h3))
(ensures (modifies l_goal h1 h3))
[SMTPat (fresh_loc l_fresh h1 h2);
SMTPat (modifies l_aux h1 h2);
SMTPat (modifies l_goal h1 h3)]
(*
* AR: this lemma is framing the modifies clause across a fresh frame
* one way to do it would have been to reuse the lemma modifies_remove_new_locs,
* treating the fresh frame as another new location
* however, the way the library is set up, loc_region in any form cannot be considered
* a fresh loc
* so, we have a special lemma for fresh_frame
*)
let modifies_remove_fresh_frame (h1 h2 h3:HS.mem) (l:loc)
: Lemma (requires (HS.fresh_frame h1 h2 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h2)) l) h2 h3))
(ensures (modifies l h1 h3))
[SMTPat (modifies l h1 h3); SMTPat (HS.fresh_frame h1 h2)]
= loc_regions_unused_in h1 (HS.mod_set (Set.singleton (HS.get_tip h2)));
modifies_only_not_unused_in l h1 h3
/// Legacy shorthands for disjointness and inclusion of buffers
///
let disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_disjoint (loc_buffer b1) (loc_buffer b2)
let includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_includes (loc_buffer b1) (loc_buffer b2) /\
(g_is_null b1 <==> g_is_null b2)
val disjoint_neq (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (disjoint b1 b2 /\ U32.v (len b1) > 0))
(ensures (~(b1 === b2)))
val empty_disjoint (#t1 #t2: Type) (#rrel1 #rel1: srel t1) (#rrel2 #rel2: srel t2) (b1: mbuffer t1 rrel1 rel1) (b2: mbuffer t2 rrel2 rel2) : Lemma
(requires (length b1 == 0))
(ensures (disjoint b1 b2))
(*
/// The liveness of a sub-buffer entails from the liveness
/// of its enclosing buffer.
val includes_live (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (live h larger ==> live h smaller))
[SMTPatOr [
[SMTPat (includes larger smaller); SMTPat (live h larger)];
[SMTPat (includes larger smaller); SMTPat (live h smaller)];
]]
*)
val includes_frameOf_as_addr (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (g_is_null larger == g_is_null smaller /\ frameOf larger == frameOf smaller /\ as_addr larger == as_addr smaller))
[SMTPat (larger `includes` smaller)]
///
/// Useful shorthands for pointers, or maybe-null pointers
inline_for_extraction
type mpointer (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{length b == 1}
inline_for_extraction
type mpointer_or_null (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{if g_is_null b then True else length b == 1}
unfold
let deref (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (x:mpointer a rrel rel) =
get h x 0
/// Two pointers having different contents are disjoint. This is valid
/// only for pointers, i.e. buffers of size 1.
val pointer_distinct_sel_disjoint
(#a:Type0) (#rrel1 #rrel2 #rel1 #rel2:srel a)
(b1:mpointer a rrel1 rel1)
(b2:mpointer a rrel2 rel2)
(h:HS.mem)
:Lemma (requires (live h b1 /\ live h b2 /\ get h b1 0 =!= get h b2 0))
(ensures (disjoint b1 b2))
/// The following stateful operations on buffers do not change the
/// memory, but, as required by the C standard, they all require the
/// buffer in question to be live.
/// The nullity test, ``is_null b``, which KaRaMeL compiles to C as ``b == NULL``.
val is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack bool (requires (fun h -> live h b))
(ensures (fun h y h' -> h == h' /\ y == g_is_null b))
/// ``sub b i len`` constructs the sub-buffer of ``b`` starting from
/// offset ``i`` with length ``len``. KaRaMeL extracts this operation as
/// ``b + i`` (or, equivalently, ``&b[i]``.)
val msub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t) (len:Ghost.erased U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i + U32.v (Ghost.reveal len) <= length b /\ compatible_sub b i (Ghost.reveal len) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (Ghost.reveal len)))
/// ``offset b i`` constructs the tail of the buffer ``b`` starting from
/// offset ``i``, i.e. the sub-buffer of ``b`` starting from offset ``i``
/// with length ``U32.sub (len b) i``. KaRaMeL compiles it as ``b + i`` or
/// ``&b[i]``.
///
/// This stateful operation cannot be derived from ``sub``, because the
/// length cannot be computed outside of proofs.
val moffset (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i <= length b /\ compatible_sub b i (U32.sub (len b) i) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (U32.sub (len b) i)))
// goffset
/// ``index b i`` reads the value of ``b`` at offset ``i`` from memory and
/// returns it. KaRaMeL compiles it as b[i].
val index (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (i:U32.t)
:HST.Stack a (requires (fun h -> live h b /\ U32.v i < length b))
(ensures (fun h y h' -> h == h' /\ y == Seq.index (as_seq h b) (U32.v i)))
/// The following stateful operations on buffers modify the memory,
/// and, as usual, require the liveness of the buffer.
/// ``g_upd_seq b s h`` updates the entire buffer `b`'s contents in
/// heap `h` to correspond to the sequence `s`
val g_upd_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (s:Seq.lseq a (length b))
(h:HS.mem{live h b})
:GTot HS.mem
val lemma_g_upd_with_same_seq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b)) (ensures (g_upd_seq b (as_seq h b) h == h))
/// A lemma specifying `g_upd_seq` in terms of its effect on the
/// buffer's underlying sequence
val g_upd_seq_as_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(s:Seq.lseq a (length b))
(h:HS.mem{live h b})
: Lemma (let h' = g_upd_seq b s h in
(Seq.length s > 0 ==> not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
HST.equal_domains h h' /\
as_seq h' b == s)
/// ``g_upd b i v h`` updates the buffer `b` in heap `h` at location
/// `i` writing ``v`` there. This is the spec analog of the stateful
/// update `upd` below.
let g_upd (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: GTot HS.mem
= g_upd_seq b (Seq.upd (as_seq h b) i v) h
val g_upd_modifies_strong (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: Lemma (modifies (loc_buffer_from_to b (U32.uint_to_t i) (U32.uint_to_t (i + 1))) h (g_upd b i v h))
/// ``upd b i v`` writes ``v`` to the memory, at offset ``i`` of
/// buffer ``b``. KaRaMeL compiles it as ``b[i] = v``.
val upd'
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
:HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> h' == g_upd b (U32.v i) v h))
inline_for_extraction
let upd
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
: HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> (not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
as_seq h' b == Seq.upd (as_seq h b) (U32.v i) v))
= let h = HST.get () in
upd' b i v;
g_upd_seq_as_seq b (Seq.upd (as_seq h b) (U32.v i) v) h
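(*
* Illustrative sketch (hypothetical function, using a trivial preorder so
* that the ``rel`` side condition of ``upd`` holds for any write):
*
* let triv : srel UInt32.t = fun _ _ -> True
*
* let copy_first_to_second (b:mbuffer UInt32.t triv triv{2 <= length b})
*   : HST.Stack unit
*     (requires fun h -> live h b)
*     (ensures  fun h0 _ h1 ->
*        modifies (loc_buffer b) h0 h1 /\ live h1 b /\
*        as_seq h1 b == Seq.upd (as_seq h0 b) 1 (get h0 b 0))
* = let x = index b 0ul in
*   upd b 1ul x
*)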
(* FIXME: Comment on `recall` *)
val recallable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val region_lifetime_buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Type0
(*
* A functoriality lemma
*)
unfold
let rrel_rel_always_compatible (#a:Type0) (rrel rel:srel a) =
forall (len:nat) (i:nat) (j:nat{i <= j /\ j <= len}). compatible_subseq_preorder len rrel i j rel
val region_lifetime_sub (#a:Type0) (#rrel #rel #subrel:srel a)
(b0:mbuffer a rrel rel)
(b1:mbuffer a rrel subrel)
: Lemma
(requires rrel_rel_always_compatible rrel subrel)
(ensures
(region_lifetime_buf b0 /\
(exists i len. U32.v i + U32.v len <= length b0 /\ b1 == mgsub subrel b0 i len)) ==> region_lifetime_buf b1)
val recallable_null (#a:Type0) (#rrel #rel:srel a)
:Lemma (recallable (mnull #a #rrel #rel)) [SMTPat (recallable (mnull #a #rrel #rel))]
(*
val recallable_includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (recallable larger <==> recallable smaller))
[SMTPatOr [
[SMTPat (recallable larger); SMTPat (recallable smaller);];
[SMTPat (larger `includes` smaller)];
]]
*)
val recallable_mgsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel /\ recallable b))
(ensures (recallable (mgsub sub_rel b i len)))
[SMTPatOr [
[SMTPat (recallable (mgsub sub_rel b i len))];
[SMTPat (recallable b); SMTPat (mgsub sub_rel b i len);]
]]
val recall (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack unit (requires (fun m -> recallable b \/ (region_lifetime_buf b /\ HS.live_region m (frameOf b))))
(ensures (fun m0 _ m1 -> m0 == m1 /\ live m1 b))
(*
* Begin: API for general witness and recall
* Clients can witness predicates on the contents of the buffer, and later recall them
* Provided the predicates are stable w.r.t. the buffer preorder
*)
(* Shorthand for predicates of Seq.seq a *)
unfold let spred (a:Type0) = Seq.seq a -> Type0
(*
* Note the tight patterns on the quantifier; you may need to write additional triggers
* if you are directly working with them
*)
unfold let stable_on (#a:Type0) (p:spred a) (rel:srel a) =
forall (s1 s2:Seq.seq a).{:pattern (p s1); (rel s1 s2); (p s2)} (p s1 /\ rel s1 s2) ==> p s2
(* Clients get this pure token when they witness a predicate *)
val witnessed (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a) :Type0
(*
* We can only support witness and recall for gc-malloced buffers (i.e. recallable ones)
* This is not a fundamental limitation, but needs some tweaks to the underlying state model
*)
val witness_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.ST unit (requires (fun h0 -> p (as_seq h0 b) /\ p `stable_on` rel))
(ensures (fun h0 _ h1 -> h0 == h1 /\ b `witnessed` p))
val recall_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.ST unit (requires (fun h0 -> (recallable b \/ live h0 b) /\ b `witnessed` p))
(ensures (fun h0 _ h1 -> h0 == h1 /\ live h0 b /\ p (as_seq h0 b)))
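(*
* Illustrative sketch (hypothetical code): with an "immutable" preorder
* that relates a sequence only to itself, any predicate on the contents is
* trivially stable, so it can be witnessed once and recalled much later:
*
* let immutable (a:Type0) : srel a = fun s1 s2 -> s1 == s2
*
* let remember (#a:Type0) (b:mbuffer a (immutable a) (immutable a)) (s:Seq.seq a)
*   : HST.ST unit
*     (requires fun h -> as_seq h b == s)
*     (ensures  fun h0 _ h1 -> h0 == h1 /\ b `witnessed` (fun s' -> s' == s))
* = witness_p b (fun s' -> s' == s)
*)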
val witnessed_functorial (#a:Type0)
(#rrel #rel1 #rel2:srel a)
(b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t)
(s1 s2:spred a)
: Lemma
(requires
rrel_rel_always_compatible rrel rel1 /\ //e.g. trivial_preorder, immutable preorder etc.
U32.v i + U32.v len <= length b1 /\
b2 == mgsub rel2 b1 i len /\ //the underlying allocation unit for b1 and b2 must be the same
witnessed b1 s1 /\
(forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2)))
(ensures witnessed b2 s2)
(*
* A stateful version that relaxes the rrel and rel compatibility
* but requires liveness of b1
*)
val witnessed_functorial_st (#a:Type0)
(#rrel #rel1 #rel2:srel a)
(b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t)
(s1 s2:spred a)
: HST.Stack unit
(requires fun h ->
live h b1 /\
U32.v i + U32.v len <= length b1 /\
b2 == mgsub rel2 b1 i len /\
witnessed b1 s1 /\
(forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2)))
(ensures fun h0 _ h1 -> h0 == h1 /\ witnessed b2 s2)
(* End: API for general witness and recall *)
/// Deallocation. A buffer that was allocated by ``malloc`` (see below)
/// can be freed using ``free``.
val freeable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val free (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.ST unit (requires (fun h0 -> live h0 b /\ freeable b))
(ensures (fun h0 _ h1 -> (not (g_is_null b)) /\
Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\
(HS.get_tip h1) == (HS.get_tip h0) /\
modifies (loc_addr_of_buffer b) h0 h1 /\
HS.live_region h1 (frameOf b)))
val freeable_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (freeable b)) (ensures (length b > 0))
[SMTPat (freeable b)]
val freeable_disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (frameOf b1 <> frameOf b2 \/ as_addr b1 <> as_addr b2))
let freeable_disjoint' (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (loc_disjoint (loc_addr_of_buffer b1) (loc_addr_of_buffer b2)))
[SMTPat (freeable b1); SMTPat (disjoint b1 b2)]
= freeable_disjoint b1 b2
(***** Begin allocation functions *****)
/// Allocation. This is the common postcondition of all allocation
/// operators, which tells that the resulting buffer is fresh, and
/// specifies its initial contents.
(*
* Allocation functions:
* In the return type, we try to give heap-independent postconditions (such as length)
* in the refinement of the buffer type (for the usage pattern of top-level buffers)
* while heap dependent postconditions are provided in the ensures clause
*
* One unsatisfying aspect is that these functions are duplicated in the wrappers that we write
* (e.g. Buffer, ImmutableBuffer, etc.)
* If we don't duplicate, then the clients may face type inference issues (for preorders)
*
* So, if you change any of the pre- or postcondition, you should change the pre and post spec functions
* (such as alloc_post_mem_common etc.), rather than the specs directly
* Perhaps we can rely on F* type inference and not write specs explicitly in those wrappers?
* Will try that
*
* For memory dependent post, alloc_post_mem_common is the one used by everyone
*
* For heap allocations, the library also provides partial functions that could return null
* Clients need to explicitly check for non-null values when using these functions
* Partial function specs use alloc_partial_post_mem_common
*
* NOTE: a useful test for the implementation of partial functions is that
* their spec should be valid even when their implementation just returns null
*)
unfold let lmbuffer (a:Type0) (rrel rel:srel a) (len:nat)
= b:mbuffer a rrel rel{length b == len /\ not (g_is_null b)}
unfold
let alloc_post_mem_common (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h0 h1:HS.mem) (s:Seq.seq a)
= live h1 b /\
unused_in b h0 /\
Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\
(HS.get_tip h1) == (HS.get_tip h0) /\
modifies loc_none h0 h1 /\
as_seq h1 b == s
(* Return type and post for partial allocation functions *)
unfold let lmbuffer_or_null (a:Type0) (rrel rel:srel a) (len:nat) (r:HS.rid)
= b:mbuffer a rrel rel{(not (g_is_null b)) ==> (length b == len /\ frameOf b == r)}
unfold let alloc_partial_post_mem_common (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h0 h1:HS.mem) (s:Seq.seq a)
= (g_is_null b /\ h0 == h1) \/
((not (g_is_null b)) /\ alloc_post_mem_common b h0 h1 s)
unfold let malloc_pre (r:HS.rid) (len:U32.t) = HST.is_eternal_region r /\ U32.v len > 0
/// ``gcmalloc r init len`` allocates a memory-managed buffer of some
/// positive length ``len`` in an eternal region ``r``. Every cell of this
/// buffer will have initial contents ``init``. Such a buffer cannot be
/// freed. In fact, it is eternal: it cannot be deallocated at all.
(*
* See the Allocation comment above when changing the spec
*)
val mgcmalloc (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ recallable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
(*
* Allocate a memory-managed buffer initialized with contents from src
*
* This allocates and initializes the buffer atomically (from the perspective of the Low* clients)
*)
val mgcmalloc_and_blit (#a:Type0) (#rrel:srel a) (r:HS.rid)
(#rrel1 #rel1:srel a) (src:mbuffer a rrel1 rel1) (id_src:U32.t) (len:U32.t)
: HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ recallable b})
(requires fun h0 ->
malloc_pre r len /\
live h0 src /\ U32.v id_src + U32.v len <= length src)
(ensures fun h0 b h1 ->
alloc_post_mem_common b h0 h1
(Seq.slice (as_seq h0 src) (U32.v id_src) (U32.v id_src + U32.v len)))
(*
* See the Allocation comment above when changing the spec
*)
inline_for_extraction
let mgcmalloc_partial (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer_or_null a rrel rrel (U32.v len) r{recallable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
= mgcmalloc r init len
/// ``malloc r init len`` allocates a hand-managed buffer of some
/// positive length ``len`` in an eternal region ``r``. Every cell of this
/// buffer will have initial contents ``init``. Such a buffer can be
/// freed using ``free`` above. Note that the ``freeable`` permission is
/// only on the whole buffer ``b``, and is not inherited by any of its
/// strict sub-buffers.
(*
* See the Allocation comment above when changing the spec
*)
val mmalloc (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
(*
* Allocate a hand-managed buffer initialized with contents from src
*
* This allocates and initializes the buffer atomically (from the perspective of the Low* clients)
*)
val mmalloc_and_blit (#a:Type0) (#rrel:srel a) (r:HS.rid)
(#rrel1 #rel1:srel a) (src:mbuffer a rrel1 rel1) (id_src:U32.t) (len:U32.t)
: HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ freeable b})
(requires fun h0 ->
malloc_pre r len /\
live h0 src /\ U32.v id_src + U32.v len <= length src)
(ensures fun h0 b h1 ->
alloc_post_mem_common b h0 h1
(Seq.slice (as_seq h0 src) (U32.v id_src) (U32.v id_src + U32.v len)))
(*
* See the Allocation comment above when changing the spec
*) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | r: FStar.Monotonic.HyperHeap.rid -> init: a -> len: FStar.UInt32.t
-> FStar.HyperStack.ST.ST
(b:
LowStar.Monotonic.Buffer.lmbuffer_or_null a rrel rrel (FStar.UInt32.v len) r
{ Prims.op_Negation (LowStar.Monotonic.Buffer.g_is_null b) ==>
LowStar.Monotonic.Buffer.freeable b }) | FStar.HyperStack.ST.ST | [] | [] | [
"LowStar.Monotonic.Buffer.srel",
"FStar.Monotonic.HyperHeap.rid",
"FStar.UInt32.t",
"LowStar.Monotonic.Buffer.mmalloc",
"LowStar.Monotonic.Buffer.lmbuffer",
"FStar.UInt32.v",
"Prims.l_and",
"Prims.eq2",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.freeable",
"LowStar.Monotonic.Buffer.lmbuffer_or_null",
"Prims.l_imp",
"Prims.b2t",
"Prims.op_Negation",
"LowStar.Monotonic.Buffer.g_is_null",
"FStar.Monotonic.HyperStack.mem",
"LowStar.Monotonic.Buffer.malloc_pre",
"LowStar.Monotonic.Buffer.alloc_partial_post_mem_common",
"FStar.Seq.Base.create"
] | [] | false | true | false | false | false | let mmalloc_partial (#a: Type0) (#rrel: srel a) (r: HS.rid) (init: a) (len: U32.t)
: HST.ST (b: lmbuffer_or_null a rrel rrel (U32.v len) r {(not (g_is_null b)) ==> freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) init))) =
| mmalloc r init len | false |
Spec.K256.PointOps.fst | Spec.K256.PointOps.is_fodd | val is_fodd (x: nat) : bool | val is_fodd (x: nat) : bool | let is_fodd (x:nat) : bool = x % 2 = 1 | {
"file_name": "specs/Spec.K256.PointOps.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 38,
"end_line": 29,
"start_col": 0,
"start_line": 29
} | module Spec.K256.PointOps
open FStar.Mul
open Lib.IntTypes
open Lib.Sequence
module M = Lib.NatMod
module BSeq = Lib.ByteSequence
#set-options "--z3rlimit 30 --fuel 0 --ifuel 0"
/// Finite field
let prime : (p:pos{p = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F}) =
assert_norm (24 < pow2 256 - 0x1000003D1);
assert_norm (pow2 256 - 0x1000003D1 = pow2 256 - pow2 32 - pow2 9 - pow2 8 - pow2 7 - pow2 6 - pow2 4 - 1);
assert_norm (pow2 256 - 0x1000003D1 = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F);
pow2 256 - 0x1000003D1
let felem = x:nat{x < prime}
let zero : felem = 0
let one : felem = 1
let fadd (x y:felem) : felem = (x + y) % prime
let fsub (x y:felem) : felem = (x - y) % prime
let fmul (x y:felem) : felem = (x * y) % prime
let finv (x:felem) : felem = M.pow_mod #prime x (prime - 2) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.RawIntTypes.fsti.checked",
"Lib.NatMod.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.K256.PointOps.fst"
} | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": true,
"full_module": "Lib.NatMod",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Lib.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x: Prims.nat -> Prims.bool | Prims.Tot | [
"total"
] | [] | [
"Prims.nat",
"Prims.op_Equality",
"Prims.int",
"Prims.op_Modulus",
"Prims.bool"
] | [] | false | false | false | true | false | let is_fodd (x: nat) : bool =
| x % 2 = 1 | false |
LowStar.Monotonic.Buffer.fsti | LowStar.Monotonic.Buffer.mgcmalloc_partial | val mgcmalloc_partial (#a: Type0) (#rrel: srel a) (r: HS.rid) (init: a) (len: U32.t)
: HST.ST (b: lmbuffer_or_null a rrel rrel (U32.v len) r {recallable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) init))) | val mgcmalloc_partial (#a: Type0) (#rrel: srel a) (r: HS.rid) (init: a) (len: U32.t)
: HST.ST (b: lmbuffer_or_null a rrel rrel (U32.v len) r {recallable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) init))) | let mgcmalloc_partial (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer_or_null a rrel rrel (U32.v len) r{recallable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
= mgcmalloc r init len | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 24,
"end_line": 2152,
"start_col": 0,
"start_line": 2147
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
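(*
 * Illustration only, not part of the interface: `get` agrees with indexing
 * the reflected sequence, e.g. for the first element of a non-empty buffer.
 * The helper name is ours.
 *)
let example_get_zero (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
  :Ghost a (requires (0 < length b)) (ensures (fun x -> x == Seq.index (as_seq h b) 0))
  = get h b 0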
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` return the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder (``sub_rel``) to the sub-buffer,
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers having the same region and address have
/// their elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
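(*
 * Illustration only, not part of the interface: the monoid laws above let one
 * simplify unions of locations, e.g. drop a `loc_none` inside a nested union.
 * The lemma name is ours.
 *)
let example_loc_union_none (l1 l2: loc)
  : Lemma (loc_union (loc_union l1 loc_none) l2 == loc_union l1 l2)
  = loc_union_loc_none_r l1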
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
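(*
 * Illustration only, not part of the interface: a location is always included
 * in any union containing it. The lemma name is ours.
 *)
let example_includes_union (l1 l2: loc)
  : Lemma (loc_includes (loc_union l1 l2) l1)
  = loc_includes_refl l1;
    loc_includes_union_l l1 l2 l1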
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then so do their
/// corresponding sets of memory locations.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then so do their corresponding sets of memory
/// locations.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then so do their corresponding memory locations
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
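(*
 * Illustration only, not part of the interface: disjointness from a union
 * gives disjointness from each operand. The lemma name is ours.
 *)
let example_disjoint_union_left (s s1 s2: loc)
  : Lemma (requires (loc_disjoint s (loc_union s1 s2)))
          (ensures (loc_disjoint s s1))
  = loc_disjoint_union_r' s s1 s2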
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
* Same as all_disjoint, retaining for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
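(*
 * Illustration only, not part of the interface: the typical framing argument.
 * Writing within the footprint of `b1` leaves a disjoint buffer `b2` live and
 * unchanged. The lemma name is ours.
 *)
let example_framing (#a:Type0) (#rrel #rel:srel a)
  (b1 b2:mbuffer a rrel rel) (h0 h1:HS.mem)
  : Lemma (requires (live h0 b2 /\
                     loc_disjoint (loc_buffer b1) (loc_buffer b2) /\
                     modifies (loc_buffer b1) h0 h1))
          (ensures (live h1 b2 /\ as_seq h0 b2 == as_seq h1 b2))
  = loc_disjoint_sym (loc_buffer b1) (loc_buffer b2);
    modifies_buffer_elim b2 (loc_buffer b1) h0 h1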
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
/// If the memory state does not change, then ``modifies s h h`` holds for any
/// set ``s`` of memory locations (in particular, for the empty set ``loc_none``).
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x
/// Modifies clauses are transitive. This lemma is the most general
/// one.
val modifies_trans
(s12: loc)
(h1 h2: HS.mem)
(s23: loc)
(h3: HS.mem)
: Lemma
(requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
(ensures (modifies (loc_union s12 s23) h1 h3))
let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
= modifies_trans l h1 h2 l_goal h3
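(*
 * Illustration only, not part of the interface: transitivity specialized to a
 * single location set, using idempotence of union. The lemma name is ours.
 *)
let example_modifies_chain (l:loc) (h0 h1 h2:HS.mem)
  : Lemma (requires (modifies l h0 h1 /\ modifies l h1 h2))
          (ensures (modifies l h0 h2))
  = modifies_trans l h0 h1 l h2;
    loc_union_idem l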
/// Regions that are not live can be removed from sets of memory
/// locations that are modified.
val modifies_only_live_regions
(rs: Set.set HS.rid)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_regions false rs) l) h h' /\
(forall r . Set.mem r rs ==> (~ (HS.live_region h r)))
))
(ensures (modifies l h h'))
/// As a consequence, fresh regions can be removed from modifies
/// clauses.
val no_upd_fresh_region: r:HS.rid -> l:loc -> h0:HS.mem -> h1:HS.mem -> Lemma
(requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1))
(ensures (modifies l h0 h1))
[SMTPat (HS.fresh_region r h0 h1); SMTPat (modifies l h0 h1)]
val new_region_modifies (m0: HS.mem) (r0: HS.rid) (col: option int) : Lemma
(requires (HST.is_eternal_region r0 /\ HS.live_region m0 r0 /\ (None? col \/ HS.is_heap_color (Some?.v col))))
(ensures (
let (_, m1) = HS.new_eternal_region m0 r0 col in
modifies loc_none m0 m1
))
[SMTPat (HS.new_eternal_region m0 r0 col)]
/// Stack discipline: any stack frame (and all its transitively
/// extending regions) that is pushed, modified and popped can be
/// removed from a modifies clause.
/// AR: 01/29/2019: Removing the smt pattern from this lemma.
/// Clients are no longer expected to call it explicitly;
/// if you find yourself having to, please raise an issue.
val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
))
/// Compatibility lemmas to rescue modifies clauses specified in the
/// standard F* HyperStack library.
val modifies_loc_regions_intro
(rs: Set.set HS.rid)
(h1 h2: HS.mem)
: Lemma
(requires (HS.modifies rs h1 h2))
(ensures (modifies (loc_regions true rs) h1 h2))
val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2))
/// Modifies clauses for allocating a reference: nothing is
/// modified. (In particular, a modifies clause does not track
/// memory locations that are created.)
val modifies_ralloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(i: HS.rid)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel)
(h' : HS.mem)
: Lemma
(requires (HST.ralloc_post i init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.ralloc_post i init h x h')]
val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')]
/// Modifies clause for freeing a reference: the address is modified.
val modifies_free
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel { HS.is_mm r } )
(m: HS.mem { m `HS.contains` r } )
: Lemma
(modifies (loc_freed_mreference r) m (HS.free r m))
[SMTPat (HS.free r m)]
/// Another compatibility lemma
val modifies_none_modifies
(h1 h2: HS.mem)
: Lemma
(requires (HST.modifies_none h1 h2))
(ensures (modifies loc_none h1 h2))
[SMTPat (HST.modifies_none h1 h2)]
/// Compatibility with HS.upd
val modifies_upd
(#t: Type) (#pre: Preorder.preorder t)
(r: HS.mreference t pre)
(v: t)
(h: HS.mem)
: Lemma
(requires (HS.contains h r))
(ensures (modifies (loc_mreference r) h (HS.upd h r v)))
[SMTPat (HS.upd h r v)]
/// Introduction lemma for modifying loc_buffer_from_to
val modifies_loc_buffer_from_to_intro
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
/// A memory ``h`` does not contain address ``a`` in region ``r``, denoted
/// ``does_not_contain_addr h (r, a)``, only if either region ``r`` is
/// not live, or address ``a`` is unused in region ``r``.
(* BEGIN TODO: move to FStar.Monotonic.HyperStack *)
val does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: GTot Type0
val not_live_region_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (~ (HS.live_region h (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val unused_in_does_not_contain_addr
(h: HS.mem)
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r)))
val addr_unused_in_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val free_does_not_contain_addr
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
HS.is_mm r /\
m `HS.contains` r /\
fst x == HS.frameOf r /\
snd x == HS.as_addr r
))
(ensures (
HS.free r m `does_not_contain_addr` x
))
[SMTPat (HS.free r m `does_not_contain_addr` x)]
val does_not_contain_addr_elim
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
m `does_not_contain_addr` x /\
HS.frameOf r == fst x /\
HS.as_addr r == snd x
))
(ensures (~ (m `HS.contains` r)))
(** END TODO *)
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_live_addresses
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_addresses false r a) l) h h' /\
(forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x))
))
(ensures (modifies l h h'))
(* Generic way to ensure that a buffer just allocated is disjoint from
any other object, however the latter's liveness is defined. *)
val loc_not_unused_in (h: HS.mem) : GTot loc
val loc_unused_in (h: HS.mem) : GTot loc
(* Shortcut notations with more handy names *)
let loc_in (l: loc) (h: HS.mem) =
loc_not_unused_in h `loc_includes` l
let loc_not_in (l: loc) (h: HS.mem) =
loc_unused_in h `loc_includes` l
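(*
 * An illustrative use of these shorthands: a typical precondition stating
 * that a buffer's footprint is already allocated in the current memory,
 *
 *   ... (requires fun h -> live h b /\ loc_buffer b `loc_in` h) ...
 *
 * which, for live buffers, follows from live_loc_not_unused_in below,
 * together with loc_includes transitivity.
 *)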
val loc_regions_unused_in (h: HS.mem) (rs: Set.set HS.rid) : Lemma
(requires (forall r . Set.mem r rs ==> (~ (HS.live_region h r))))
(ensures (loc_unused_in h `loc_includes` loc_regions false rs))
val loc_unused_in_not_unused_in_disjoint (h: HS.mem) : Lemma
(loc_disjoint (loc_unused_in h) (loc_not_unused_in h))
val not_live_region_loc_not_unused_in_disjoint
(h0: HS.mem)
(r: HS.rid)
: Lemma
(requires (~ (HS.live_region h0 r)))
(ensures (loc_disjoint (loc_region_only false r) (loc_not_unused_in h0)))
let fresh_frame_loc_not_unused_in_disjoint
(h0 h1: HS.mem)
: Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)]
= not_live_region_loc_not_unused_in_disjoint h0 (HS.get_tip h1)
val live_loc_not_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b))
(ensures (loc_not_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (live h b)]
val unused_in_loc_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (unused_in b h))
(ensures (loc_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (unused_in b h)]
val modifies_address_liveness_insensitive_unused_in
(h h' : HS.mem)
: Lemma
(requires (modifies (address_liveness_insensitive_locs) h h'))
(ensures (loc_not_unused_in h' `loc_includes` loc_not_unused_in h /\ loc_unused_in h `loc_includes` loc_unused_in h'))
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_not_unused_in
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (modifies (loc_union (loc_unused_in h) l) h h'))
(ensures (modifies l h h'))
val mreference_live_loc_not_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (h `HS.contains` r))
(ensures (loc_not_unused_in h `loc_includes` loc_freed_mreference r /\ loc_not_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.contains h r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_freed_mreference r)];
]]
val mreference_unused_in_loc_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (loc_unused_in h `loc_includes` loc_freed_mreference r /\ loc_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.unused_in r h)];
[SMTPat (loc_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_unused_in h `loc_includes` loc_freed_mreference r)];
]]
let unused_in_not_unused_in_disjoint_2
(l1 l2 l1' l2': loc)
(h: HS.mem)
: Lemma
(requires (loc_unused_in h `loc_includes` l1 /\ loc_not_unused_in h `loc_includes` l2 /\ l1 `loc_includes` l1' /\ l2 `loc_includes` l2' ))
(ensures (loc_disjoint l1' l2' ))
[SMTPat (loc_disjoint l1' l2'); SMTPat (loc_unused_in h `loc_includes` l1); SMTPat (loc_not_unused_in h `loc_includes` l2)]
= loc_includes_trans (loc_unused_in h) l1 l1' ;
loc_includes_trans (loc_not_unused_in h) l2 l2' ;
loc_unused_in_not_unused_in_disjoint h ;
loc_disjoint_includes (loc_unused_in h) (loc_not_unused_in h) l1' l2'
val modifies_loc_unused_in
(l: loc)
(h1 h2: HS.mem)
(l' : loc)
: Lemma
(requires (
modifies l h1 h2 /\
address_liveness_insensitive_locs `loc_includes` l /\
loc_unused_in h2 `loc_includes` l'
))
(ensures (loc_unused_in h1 `loc_includes` l'))
[SMTPatOr [
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h2 `loc_includes` l')];
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h1 `loc_includes` l')];
]]
/// Shorthand: freshness
let fresh_loc (l: loc) (h h' : HS.mem) : GTot Type0 =
loc_unused_in h `loc_includes` l /\
loc_not_unused_in h' `loc_includes` l
let ralloc_post_fresh_loc (#a:Type) (#rel:Preorder.preorder a) (i: HS.rid) (init:a) (m0: HS.mem)
(x: HST.mreference a rel{HST.is_eternal_region (HS.frameOf x)}) (m1: HS.mem) : Lemma
(requires (HST.ralloc_post i init m0 x m1))
(ensures (fresh_loc (loc_freed_mreference x) m0 m1))
[SMTPat (HST.ralloc_post i init m0 x m1)]
= ()
//AR: this is needed for liveness across fresh_frame
val fresh_frame_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (modifies loc_none h0 h1))
[SMTPat (HS.fresh_frame h0 h1)]
val popped_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.popped h0 h1))
(ensures (modifies (loc_region_only false (HS.get_tip h0)) h0 h1))
[SMTPat (HS.popped h0 h1)]
val modifies_remove_new_locs (l_fresh l_aux l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (fresh_loc l_fresh h1 h2 /\
modifies l_aux h1 h2 /\
l_goal `loc_includes` l_aux /\
modifies (loc_union l_fresh l_goal) h2 h3))
(ensures (modifies l_goal h1 h3))
[SMTPat (fresh_loc l_fresh h1 h2);
SMTPat (modifies l_aux h1 h2);
SMTPat (modifies l_goal h1 h3)]
(*
* AR: this lemma is framing the modifies clause across a fresh frame
 * one way to do it would have been to reuse the lemma modifies_remove_new_locs,
 * treating the fresh frame as just another new location;
 * however, the way the library is set up, loc_region in any form cannot be considered
 * a fresh loc,
 * so we have a special lemma for fresh_frame
*)
let modifies_remove_fresh_frame (h1 h2 h3:HS.mem) (l:loc)
: Lemma (requires (HS.fresh_frame h1 h2 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h2)) l) h2 h3))
(ensures (modifies l h1 h3))
[SMTPat (modifies l h1 h3); SMTPat (HS.fresh_frame h1 h2)]
= loc_regions_unused_in h1 (HS.mod_set (Set.singleton (HS.get_tip h2)));
modifies_only_not_unused_in l h1 h3
/// Legacy shorthands for disjointness and inclusion of buffers
///
let disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_disjoint (loc_buffer b1) (loc_buffer b2)
let includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_includes (loc_buffer b1) (loc_buffer b2) /\
(g_is_null b1 <==> g_is_null b2)
val disjoint_neq (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (disjoint b1 b2 /\ U32.v (len b1) > 0))
(ensures (~(b1 === b2)))
val empty_disjoint (#t1 #t2: Type) (#rrel1 #rel1: srel t1) (#rrel2 #rel2: srel t2) (b1: mbuffer t1 rrel1 rel1) (b2: mbuffer t2 rrel2 rel2) : Lemma
(requires (length b1 == 0))
(ensures (disjoint b1 b2))
(*
/// The liveness of a sub-buffer follows from the liveness
/// of its enclosing buffer.
val includes_live (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (live h larger ==> live h smaller))
[SMTPatOr [
[SMTPat (includes larger smaller); SMTPat (live h larger)];
[SMTPat (includes larger smaller); SMTPat (live h smaller)];
]]
*)
val includes_frameOf_as_addr (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (g_is_null larger == g_is_null smaller /\ frameOf larger == frameOf smaller /\ as_addr larger == as_addr smaller))
[SMTPat (larger `includes` smaller)]
///
/// Useful shorthands for pointers, or maybe-null pointers
inline_for_extraction
type mpointer (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{length b == 1}
inline_for_extraction
type mpointer_or_null (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{if g_is_null b then True else length b == 1}
unfold
let deref (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (x:mpointer a rrel rel) =
get h x 0
/// Two pointers having different contents are disjoint. This is valid
/// only for pointers, i.e. buffers of size 1.
val pointer_distinct_sel_disjoint
(#a:Type0) (#rrel1 #rrel2 #rel1 #rel2:srel a)
(b1:mpointer a rrel1 rel1)
(b2:mpointer a rrel2 rel2)
(h:HS.mem)
:Lemma (requires (live h b1 /\ live h b2 /\ get h b1 0 =!= get h b2 0))
(ensures (disjoint b1 b2))
/// The following stateful operations on buffers do not change the
/// memory, but, as required by the C standard, they all require the
/// buffer in question to be live.
/// The nullity test, ``is_null b``, which KaRaMeL compiles to C as ``b == NULL``.
val is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack bool (requires (fun h -> live h b))
(ensures (fun h y h' -> h == h' /\ y == g_is_null b))
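(*
 * An illustrative sketch: a null check wrapped in a small Stack function.
 * The name check_non_null is hypothetical; KaRaMeL compiles the test to
 * `b != NULL`.
 *
 *   let check_non_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
 *     : HST.Stack bool
 *       (requires fun h -> live h b)
 *       (ensures  fun h r h' -> h == h' /\ r == not (g_is_null b))
 *     = not (is_null b)
 *)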
/// ``sub b i len`` constructs the sub-buffer of ``b`` starting from
/// offset ``i`` with length ``len``. KaRaMeL extracts this operation as
/// ``b + i`` (or, equivalently, ``&b[i]``.)
val msub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t) (len:Ghost.erased U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i + U32.v (Ghost.reveal len) <= length b /\ compatible_sub b i (Ghost.reveal len) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (Ghost.reveal len)))
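(*
 * An illustrative sketch: taking the first n elements of a buffer as a
 * sub-buffer that keeps the same preorder. The name prefix_sub is
 * hypothetical, and the compatible_sub hypothesis is simply assumed in the
 * precondition; KaRaMeL extracts the call to just `b`.
 *
 *   let prefix_sub (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (n:U32.t)
 *     : HST.Stack (mbuffer a rrel rel)
 *       (requires fun h -> U32.v n <= length b /\ compatible_sub b 0ul n rel /\ live h b)
 *       (ensures  fun h b' h' -> h == h' /\ b' == mgsub rel b 0ul n)
 *     = msub rel b 0ul (Ghost.hide n)
 *)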
/// ``offset b i`` constructs the tail of the buffer ``b`` starting from
/// offset ``i``, i.e. the sub-buffer of ``b`` starting from offset ``i``
/// with length ``U32.sub (len b) i``. KaRaMeL compiles it as ``b + i`` or
/// ``&b[i]``.
///
/// This stateful operation cannot be derived from ``sub``, because the
/// length cannot be computed outside of proofs.
val moffset (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i <= length b /\ compatible_sub b i (U32.sub (len b) i) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (U32.sub (len b) i)))
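(*
 * An illustrative sketch: dropping the first cell of a non-empty buffer,
 * extracted by KaRaMeL as `b + 1`. The name tail_sub is hypothetical and the
 * compatible_sub hypothesis is again assumed in the precondition.
 *
 *   let tail_sub (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
 *     : HST.Stack (mbuffer a rrel rel)
 *       (requires fun h -> 1 <= length b /\
 *                       compatible_sub b 1ul (U32.sub (len b) 1ul) rel /\ live h b)
 *       (ensures  fun h b' h' -> h == h' /\ b' == mgsub rel b 1ul (U32.sub (len b) 1ul))
 *     = moffset rel b 1ul
 *)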
// goffset
/// ``index b i`` reads the value of ``b`` at offset ``i`` from memory and
/// returns it. KaRaMeL compiles it as b[i].
val index (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (i:U32.t)
:HST.Stack a (requires (fun h -> live h b /\ U32.v i < length b))
(ensures (fun h y h' -> h == h' /\ y == Seq.index (as_seq h b) (U32.v i)))
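(*
 * An illustrative sketch: reading the first cell, compiled to `b[0]`.
 * The name read_first is hypothetical.
 *
 *   let read_first (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
 *     : HST.Stack a
 *       (requires fun h -> live h b /\ 0 < length b)
 *       (ensures  fun h x h' -> h == h' /\ x == Seq.index (as_seq h b) 0)
 *     = index b 0ul
 *)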
/// The following stateful operations on buffers modify the memory,
/// and, as usual, require the liveness of the buffer.
/// ``g_upd_seq b s h`` updates the entire buffer `b`'s contents in
/// heap `h` to correspond to the sequence `s`
val g_upd_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (s:Seq.lseq a (length b))
(h:HS.mem{live h b})
:GTot HS.mem
val lemma_g_upd_with_same_seq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b)) (ensures (g_upd_seq b (as_seq h b) h == h))
/// A lemma specifying `g_upd_seq` in terms of its effect on the
/// buffer's underlying sequence
val g_upd_seq_as_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(s:Seq.lseq a (length b))
(h:HS.mem{live h b})
: Lemma (let h' = g_upd_seq b s h in
(Seq.length s > 0 ==> not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
HST.equal_domains h h' /\
as_seq h' b == s)
/// ``g_upd b i v h`` updates the buffer `b` in heap `h` at location
/// `i` writing ``v`` there. This is the spec analog of the stateful
/// update `upd` below.
let g_upd (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: GTot HS.mem
= g_upd_seq b (Seq.upd (as_seq h b) i v) h
val g_upd_modifies_strong (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: Lemma (modifies (loc_buffer_from_to b (U32.uint_to_t i) (U32.uint_to_t (i + 1))) h (g_upd b i v h))
/// ``upd b i v`` writes ``v`` to the memory, at offset ``i`` of
/// buffer ``b``. KaRaMeL compiles it as ``b[i] = v``.
val upd'
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
:HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> h' == g_upd b (U32.v i) v h))
inline_for_extraction
let upd
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
: HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> (not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
as_seq h' b == Seq.upd (as_seq h b) (U32.v i) v))
= let h = HST.get () in
upd' b i v;
g_upd_seq_as_seq b (Seq.upd (as_seq h b) (U32.v i) v) h
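(*
 * An illustrative sketch: overwriting the first cell, compiled to `b[0] = v`.
 * The name write_first is hypothetical; the preorder rel must allow the
 * write, which is simply assumed in the precondition (it holds trivially for
 * the trivial preorder used by plain LowStar.Buffer).
 *
 *   let write_first (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (v:a)
 *     : HST.Stack unit
 *       (requires fun h -> live h b /\ 0 < length b /\
 *                       rel (as_seq h b) (Seq.upd (as_seq h b) 0 v))
 *       (ensures  fun h _ h' -> modifies (loc_buffer b) h h' /\
 *                            as_seq h' b == Seq.upd (as_seq h b) 0 v)
 *     = upd b 0ul v
 *)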
(* FIXME: Comment on `recall` *)
val recallable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val region_lifetime_buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Type0
(*
* A functoriality lemma
*)
unfold
let rrel_rel_always_compatible (#a:Type0) (rrel rel:srel a) =
forall (len:nat) (i:nat) (j:nat{i <= j /\ j <= len}). compatible_subseq_preorder len rrel i j rel
val region_lifetime_sub (#a:Type0) (#rrel #rel #subrel:srel a)
(b0:mbuffer a rrel rel)
(b1:mbuffer a rrel subrel)
: Lemma
(requires rrel_rel_always_compatible rrel subrel)
(ensures
(region_lifetime_buf b0 /\
(exists i len. U32.v i + U32.v len <= length b0 /\ b1 == mgsub subrel b0 i len)) ==> region_lifetime_buf b1)
val recallable_null (#a:Type0) (#rrel #rel:srel a)
:Lemma (recallable (mnull #a #rrel #rel)) [SMTPat (recallable (mnull #a #rrel #rel))]
(*
val recallable_includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (recallable larger <==> recallable smaller))
[SMTPatOr [
[SMTPat (recallable larger); SMTPat (recallable smaller);];
[SMTPat (larger `includes` smaller)];
]]
*)
val recallable_mgsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel /\ recallable b))
(ensures (recallable (mgsub sub_rel b i len)))
[SMTPatOr [
[SMTPat (recallable (mgsub sub_rel b i len))];
[SMTPat (recallable b); SMTPat (mgsub sub_rel b i len);]
]]
val recall (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack unit (requires (fun m -> recallable b \/ (region_lifetime_buf b /\ HS.live_region m (frameOf b))))
(ensures (fun m0 _ m1 -> m0 == m1 /\ live m1 b))
(*
* Begin: API for general witness and recall
* Clients can witness predicates on the contents of the buffer, and later recall them
* Provided the predicates are stable w.r.t. the buffer preorder
*)
(* Shorthand for predicates of Seq.seq a *)
unfold let spred (a:Type0) = Seq.seq a -> Type0
(*
* Note the tight patterns on the quantifier, you may need to write additional triggers
* if you are directly working with them
*)
unfold let stable_on (#a:Type0) (p:spred a) (rel:srel a) =
forall (s1 s2:Seq.seq a).{:pattern (p s1); (rel s1 s2); (p s2)} (p s1 /\ rel s1 s2) ==> p s2
(* Clients get this pure token when they witness a predicate *)
val witnessed (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a) :Type0
(*
 * We can only support witness and recall for gc-malloced buffers (i.e. recallable ones).
* This is not a fundamental limitation, but needs some tweaks to the underlying state model
*)
val witness_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.ST unit (requires (fun h0 -> p (as_seq h0 b) /\ p `stable_on` rel))
(ensures (fun h0 _ h1 -> h0 == h1 /\ b `witnessed` p))
val recall_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.ST unit (requires (fun h0 -> (recallable b \/ live h0 b) /\ b `witnessed` p))
(ensures (fun h0 _ h1 -> h0 == h1 /\ live h0 b /\ p (as_seq h0 b)))
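(*
 * An illustrative sketch of the witness/recall pattern: once a stable
 * predicate is witnessed, it can be recalled later without threading it
 * through intermediate specifications. The name witness_stable is
 * hypothetical.
 *
 *   let witness_stable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
 *     : HST.ST unit
 *       (requires fun h -> p (as_seq h b) /\ p `stable_on` rel)
 *       (ensures  fun h _ h' -> h == h' /\ b `witnessed` p)
 *     = witness_p b p
 *
 * Later, with b live (or recallable), `recall_p b p` re-establishes
 * `p (as_seq h b)` purely from the `witnessed` token.
 *)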
val witnessed_functorial (#a:Type0)
(#rrel #rel1 #rel2:srel a)
(b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t)
(s1 s2:spred a)
: Lemma
(requires
rrel_rel_always_compatible rrel rel1 /\ //e.g. trivial_preorder, immutable preorder etc.
U32.v i + U32.v len <= length b1 /\
b2 == mgsub rel2 b1 i len /\ //the underlying allocation unit for b1 and b2 must be the same
witnessed b1 s1 /\
(forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2)))
(ensures witnessed b2 s2)
(*
* A stateful version that relaxes the rrel and rel compatibility
* but requires liveness of b1
*)
val witnessed_functorial_st (#a:Type0)
(#rrel #rel1 #rel2:srel a)
(b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t)
(s1 s2:spred a)
: HST.Stack unit
(requires fun h ->
live h b1 /\
U32.v i + U32.v len <= length b1 /\
b2 == mgsub rel2 b1 i len /\
witnessed b1 s1 /\
(forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2)))
(ensures fun h0 _ h1 -> h0 == h1 /\ witnessed b2 s2)
(* End: API for general witness and recall *)
/// Deallocation. A buffer that was allocated by ``malloc`` (see below)
/// can be ``free`` d.
val freeable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val free (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.ST unit (requires (fun h0 -> live h0 b /\ freeable b))
(ensures (fun h0 _ h1 -> (not (g_is_null b)) /\
Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\
(HS.get_tip h1) == (HS.get_tip h0) /\
modifies (loc_addr_of_buffer b) h0 h1 /\
HS.live_region h1 (frameOf b)))
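(*
 * An illustrative sketch: releasing a heap-allocated buffer. The name
 * dispose is hypothetical; only buffers satisfying `freeable` (those returned
 * by the malloc operators) may be freed.
 *
 *   let dispose (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
 *     : HST.ST unit
 *       (requires fun h -> live h b /\ freeable b)
 *       (ensures  fun h _ h' -> modifies (loc_addr_of_buffer b) h h')
 *     = free b
 *)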
val freeable_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (freeable b)) (ensures (length b > 0))
[SMTPat (freeable b)]
val freeable_disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (frameOf b1 <> frameOf b2 \/ as_addr b1 <> as_addr b2))
let freeable_disjoint' (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (loc_disjoint (loc_addr_of_buffer b1) (loc_addr_of_buffer b2)))
[SMTPat (freeable b1); SMTPat (disjoint b1 b2)]
= freeable_disjoint b1 b2
(***** Begin allocation functions *****)
/// Allocation. This is the common postcondition of all allocation
/// operators, which states that the resulting buffer is fresh, and
/// specifies its initial contents.
(*
* Allocation functions:
* In the return type, we try to give heap-independent postconditions (such as length)
* in the refinement of the buffer type (for the usage pattern of top-level buffers)
* while heap dependent postconditions are provided in the ensures clause
*
* One unsatisfying aspect is that these functions are duplicated in the wrappers that we write
* (e.g. Buffer, ImmutableBuffer, etc.)
* If we don't duplicate, then the clients may face type inference issues (for preorders)
*
 * So, if you change any of the pre- or postconditions, you should change the pre- and post-spec functions
* (such as alloc_post_mem_common etc.), rather than the specs directly
* Perhaps we can rely on F* type inference and not write specs explicitly in those wrappers?
* Will try that
*
* For memory dependent post, alloc_post_mem_common is the one used by everyone
*
* For heap allocations, the library also provides partial functions that could return null
* Clients need to explicitly check for non-null values when using these functions
* Partial function specs use alloc_partial_post_mem_common
*
* NOTE: a useful test for the implementation of partial functions is that
* their spec should be valid even when their implementation just returns null
*)
unfold let lmbuffer (a:Type0) (rrel rel:srel a) (len:nat)
= b:mbuffer a rrel rel{length b == len /\ not (g_is_null b)}
unfold
let alloc_post_mem_common (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h0 h1:HS.mem) (s:Seq.seq a)
= live h1 b /\
unused_in b h0 /\
Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\
(HS.get_tip h1) == (HS.get_tip h0) /\
modifies loc_none h0 h1 /\
as_seq h1 b == s
(* Return type and post for partial allocation functions *)
unfold let lmbuffer_or_null (a:Type0) (rrel rel:srel a) (len:nat) (r:HS.rid)
= b:mbuffer a rrel rel{(not (g_is_null b)) ==> (length b == len /\ frameOf b == r)}
unfold let alloc_partial_post_mem_common (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h0 h1:HS.mem) (s:Seq.seq a)
= (g_is_null b /\ h0 == h1) \/
((not (g_is_null b)) /\ alloc_post_mem_common b h0 h1 s)
unfold let malloc_pre (r:HS.rid) (len:U32.t) = HST.is_eternal_region r /\ U32.v len > 0
/// ``gcmalloc r init len`` allocates a memory-managed buffer of some
/// positive length ``len`` in an eternal region ``r``. Every cell of this
/// buffer will have initial contents ``init``. Such a buffer cannot be
/// freed. In fact, it is eternal: it cannot be deallocated at all.
(*
* See the Allocation comment above when changing the spec
*)
val mgcmalloc (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ recallable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
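(*
 * An illustrative sketch: allocating a 32-byte, GC-managed, zero-initialized
 * buffer in an eternal region. The name mk_table is hypothetical.
 *
 *   let mk_table (#rrel:srel FStar.UInt8.t) (r:HS.rid)
 *     : HST.ST (b:lmbuffer FStar.UInt8.t rrel rrel 32{frameOf b == r /\ recallable b})
 *       (requires fun _ -> malloc_pre r 32ul)
 *       (ensures  fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create 32 0uy))
 *     = mgcmalloc r 0uy 32ul
 *)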
(*
* Allocate a memory-managed buffer initialized with contents from src
*
* This allocates and initializes the buffer atomically (from the perspective of the Low* clients)
*)
val mgcmalloc_and_blit (#a:Type0) (#rrel:srel a) (r:HS.rid)
(#rrel1 #rel1:srel a) (src:mbuffer a rrel1 rel1) (id_src:U32.t) (len:U32.t)
: HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ recallable b})
(requires fun h0 ->
malloc_pre r len /\
live h0 src /\ U32.v id_src + U32.v len <= length src)
(ensures fun h0 b h1 ->
alloc_post_mem_common b h0 h1
(Seq.slice (as_seq h0 src) (U32.v id_src) (U32.v id_src + U32.v len)))
(*
* See the Allocation comment above when changing the spec
*) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | r: FStar.Monotonic.HyperHeap.rid -> init: a -> len: FStar.UInt32.t
-> FStar.HyperStack.ST.ST
(b:
LowStar.Monotonic.Buffer.lmbuffer_or_null a rrel rrel (FStar.UInt32.v len) r
{LowStar.Monotonic.Buffer.recallable b}) | FStar.HyperStack.ST.ST | [] | [] | [
"LowStar.Monotonic.Buffer.srel",
"FStar.Monotonic.HyperHeap.rid",
"FStar.UInt32.t",
"LowStar.Monotonic.Buffer.mgcmalloc",
"LowStar.Monotonic.Buffer.lmbuffer",
"FStar.UInt32.v",
"Prims.l_and",
"Prims.eq2",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.recallable",
"LowStar.Monotonic.Buffer.lmbuffer_or_null",
"FStar.Monotonic.HyperStack.mem",
"LowStar.Monotonic.Buffer.malloc_pre",
"LowStar.Monotonic.Buffer.alloc_partial_post_mem_common",
"FStar.Seq.Base.create"
] | [] | false | true | false | false | false | let mgcmalloc_partial (#a: Type0) (#rrel: srel a) (r: HS.rid) (init: a) (len: U32.t)
: HST.ST (b: lmbuffer_or_null a rrel rrel (U32.v len) r {recallable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) init))) =
| mgcmalloc r init len | false |
Spec.K256.PointOps.fst | Spec.K256.PointOps.felem | val felem : Type0 | let felem = x:nat{x < prime} | {
"file_name": "specs/Spec.K256.PointOps.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 28,
"end_line": 20,
"start_col": 0,
"start_line": 20
} | module Spec.K256.PointOps
open FStar.Mul
open Lib.IntTypes
open Lib.Sequence
module M = Lib.NatMod
module BSeq = Lib.ByteSequence
#set-options "--z3rlimit 30 --fuel 0 --ifuel 0"
/// Finite field
let prime : (p:pos{p = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F}) =
assert_norm (24 < pow2 256 - 0x1000003D1);
assert_norm (pow2 256 - 0x1000003D1 = pow2 256 - pow2 32 - pow2 9 - pow2 8 - pow2 7 - pow2 6 - pow2 4 - 1);
assert_norm (pow2 256 - 0x1000003D1 = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F);
pow2 256 - 0x1000003D1 | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.RawIntTypes.fsti.checked",
"Lib.NatMod.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.K256.PointOps.fst"
} | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": true,
"full_module": "Lib.NatMod",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Lib.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | Type0 | Prims.Tot | [
"total"
] | [] | [
"Prims.nat",
"Prims.b2t",
"Prims.op_LessThan",
"Spec.K256.PointOps.prime"
] | [] | false | false | false | true | true | let felem =
| x: nat{x < prime} | false |
|
Spec.K256.PointOps.fst | Spec.K256.PointOps.qelem | val qelem : Type0 | let qelem = x:nat{x < q} | {
"file_name": "specs/Spec.K256.PointOps.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 24,
"end_line": 43,
"start_col": 0,
"start_line": 43
} | module Spec.K256.PointOps
open FStar.Mul
open Lib.IntTypes
open Lib.Sequence
module M = Lib.NatMod
module BSeq = Lib.ByteSequence
#set-options "--z3rlimit 30 --fuel 0 --ifuel 0"
/// Finite field
let prime : (p:pos{p = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F}) =
assert_norm (24 < pow2 256 - 0x1000003D1);
assert_norm (pow2 256 - 0x1000003D1 = pow2 256 - pow2 32 - pow2 9 - pow2 8 - pow2 7 - pow2 6 - pow2 4 - 1);
assert_norm (pow2 256 - 0x1000003D1 = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F);
pow2 256 - 0x1000003D1
let felem = x:nat{x < prime}
let zero : felem = 0
let one : felem = 1
let fadd (x y:felem) : felem = (x + y) % prime
let fsub (x y:felem) : felem = (x - y) % prime
let fmul (x y:felem) : felem = (x * y) % prime
let finv (x:felem) : felem = M.pow_mod #prime x (prime - 2)
let fsqrt (x:felem) : felem = M.pow_mod #prime x ((prime + 1) / 4)
let is_fodd (x:nat) : bool = x % 2 = 1
let ( +% ) = fadd
let ( -% ) = fsub
let ( *% ) = fmul
let ( /% ) (x y:felem) = x *% finv y
/// Scalar field
// Group order
let q : q:pos{q < pow2 256} =
assert_norm (0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141 < pow2 256);
0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141 | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.RawIntTypes.fsti.checked",
"Lib.NatMod.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.K256.PointOps.fst"
} | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": true,
"full_module": "Lib.NatMod",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Lib.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | Type0 | Prims.Tot | [
"total"
] | [] | [
"Prims.nat",
"Prims.b2t",
"Prims.op_LessThan",
"Spec.K256.PointOps.q"
] | [] | false | false | false | true | true | let qelem =
| x: nat{x < q} | false |
|
Spec.K256.PointOps.fst | Spec.K256.PointOps.q | val q:q: pos{q < pow2 256} | val q:q: pos{q < pow2 256} | let q : q:pos{q < pow2 256} =
assert_norm (0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141 < pow2 256);
0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141 | {
"file_name": "specs/Spec.K256.PointOps.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 68,
"end_line": 41,
"start_col": 0,
"start_line": 39
} | module Spec.K256.PointOps
open FStar.Mul
open Lib.IntTypes
open Lib.Sequence
module M = Lib.NatMod
module BSeq = Lib.ByteSequence
#set-options "--z3rlimit 30 --fuel 0 --ifuel 0"
/// Finite field
let prime : (p:pos{p = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F}) =
assert_norm (24 < pow2 256 - 0x1000003D1);
assert_norm (pow2 256 - 0x1000003D1 = pow2 256 - pow2 32 - pow2 9 - pow2 8 - pow2 7 - pow2 6 - pow2 4 - 1);
assert_norm (pow2 256 - 0x1000003D1 = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F);
pow2 256 - 0x1000003D1
let felem = x:nat{x < prime}
let zero : felem = 0
let one : felem = 1
let fadd (x y:felem) : felem = (x + y) % prime
let fsub (x y:felem) : felem = (x - y) % prime
let fmul (x y:felem) : felem = (x * y) % prime
let finv (x:felem) : felem = M.pow_mod #prime x (prime - 2)
let fsqrt (x:felem) : felem = M.pow_mod #prime x ((prime + 1) / 4)
let is_fodd (x:nat) : bool = x % 2 = 1
let ( +% ) = fadd
let ( -% ) = fsub
let ( *% ) = fmul
let ( /% ) (x y:felem) = x *% finv y
/// Scalar field | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.RawIntTypes.fsti.checked",
"Lib.NatMod.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.K256.PointOps.fst"
} | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": true,
"full_module": "Lib.NatMod",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Lib.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | q: Prims.pos{q < Prims.pow2 256} | Prims.Tot | [
"total"
] | [] | [
"Prims.unit",
"FStar.Pervasives.assert_norm",
"Prims.b2t",
"Prims.op_LessThan",
"Prims.pow2"
] | [] | false | false | false | false | false | let q:q: pos{q < pow2 256} =
| assert_norm (0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141 < pow2 256);
0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141 | false |
Spec.K256.PointOps.fst | Spec.K256.PointOps.proj_point | val proj_point : Type0 | let proj_point = felem & felem & felem | {
"file_name": "specs/Spec.K256.PointOps.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 38,
"end_line": 56,
"start_col": 0,
"start_line": 56
} | module Spec.K256.PointOps
open FStar.Mul
open Lib.IntTypes
open Lib.Sequence
module M = Lib.NatMod
module BSeq = Lib.ByteSequence
#set-options "--z3rlimit 30 --fuel 0 --ifuel 0"
/// Finite field
let prime : (p:pos{p = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F}) =
assert_norm (24 < pow2 256 - 0x1000003D1);
assert_norm (pow2 256 - 0x1000003D1 = pow2 256 - pow2 32 - pow2 9 - pow2 8 - pow2 7 - pow2 6 - pow2 4 - 1);
assert_norm (pow2 256 - 0x1000003D1 = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F);
pow2 256 - 0x1000003D1
let felem = x:nat{x < prime}
let zero : felem = 0
let one : felem = 1
let fadd (x y:felem) : felem = (x + y) % prime
let fsub (x y:felem) : felem = (x - y) % prime
let fmul (x y:felem) : felem = (x * y) % prime
let finv (x:felem) : felem = M.pow_mod #prime x (prime - 2)
let fsqrt (x:felem) : felem = M.pow_mod #prime x ((prime + 1) / 4)
let is_fodd (x:nat) : bool = x % 2 = 1
let ( +% ) = fadd
let ( -% ) = fsub
let ( *% ) = fmul
let ( /% ) (x y:felem) = x *% finv y
/// Scalar field
// Group order
let q : q:pos{q < pow2 256} =
assert_norm (0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141 < pow2 256);
0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141
let qelem = x:nat{x < q}
let qadd (x y:qelem) : qelem = (x + y) % q
let qmul (x y:qelem) : qelem = (x * y) % q
let qinv (x:qelem) : qelem = M.pow_mod #q x (q - 2)
let qnegate (x:qelem) : qelem = (- x) % q
let scalar_is_high (x:qelem) : bool = x > q / 2
let ( +^ ) = qadd
let ( *^ ) = qmul
/// Elliptic curve | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.RawIntTypes.fsti.checked",
"Lib.NatMod.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.K256.PointOps.fst"
} | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": true,
"full_module": "Lib.NatMod",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Lib.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | Type0 | Prims.Tot | [
"total"
] | [] | [
"FStar.Pervasives.Native.tuple3",
"Spec.K256.PointOps.felem"
] | [] | false | false | false | true | true | let proj_point =
| felem & felem & felem | false |
|
Spec.K256.PointOps.fst | Spec.K256.PointOps.aff_point | val aff_point : Type0 | let aff_point = felem & felem | {
"file_name": "specs/Spec.K256.PointOps.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 29,
"end_line": 55,
"start_col": 0,
"start_line": 55
} | module Spec.K256.PointOps
open FStar.Mul
open Lib.IntTypes
open Lib.Sequence
module M = Lib.NatMod
module BSeq = Lib.ByteSequence
#set-options "--z3rlimit 30 --fuel 0 --ifuel 0"
/// Finite field
let prime : (p:pos{p = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F}) =
assert_norm (24 < pow2 256 - 0x1000003D1);
assert_norm (pow2 256 - 0x1000003D1 = pow2 256 - pow2 32 - pow2 9 - pow2 8 - pow2 7 - pow2 6 - pow2 4 - 1);
assert_norm (pow2 256 - 0x1000003D1 = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F);
pow2 256 - 0x1000003D1
let felem = x:nat{x < prime}
let zero : felem = 0
let one : felem = 1
let fadd (x y:felem) : felem = (x + y) % prime
let fsub (x y:felem) : felem = (x - y) % prime
let fmul (x y:felem) : felem = (x * y) % prime
let finv (x:felem) : felem = M.pow_mod #prime x (prime - 2)
let fsqrt (x:felem) : felem = M.pow_mod #prime x ((prime + 1) / 4)
let is_fodd (x:nat) : bool = x % 2 = 1
let ( +% ) = fadd
let ( -% ) = fsub
let ( *% ) = fmul
let ( /% ) (x y:felem) = x *% finv y
/// Scalar field
// Group order
let q : q:pos{q < pow2 256} =
assert_norm (0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141 < pow2 256);
0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141
let qelem = x:nat{x < q}
let qadd (x y:qelem) : qelem = (x + y) % q
let qmul (x y:qelem) : qelem = (x * y) % q
let qinv (x:qelem) : qelem = M.pow_mod #q x (q - 2)
let qnegate (x:qelem) : qelem = (- x) % q
let scalar_is_high (x:qelem) : bool = x > q / 2
let ( +^ ) = qadd
let ( *^ ) = qmul
/// Elliptic curve | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.RawIntTypes.fsti.checked",
"Lib.NatMod.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.K256.PointOps.fst"
} | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": true,
"full_module": "Lib.NatMod",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Lib.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | Type0 | Prims.Tot | [
"total"
] | [] | [
"FStar.Pervasives.Native.tuple2",
"Spec.K256.PointOps.felem"
] | [] | false | false | false | true | true | let aff_point =
| felem & felem | false |
|
Spec.K256.PointOps.fst | Spec.K256.PointOps.scalar_is_high | val scalar_is_high (x: qelem) : bool | val scalar_is_high (x: qelem) : bool | let scalar_is_high (x:qelem) : bool = x > q / 2 | {
"file_name": "specs/Spec.K256.PointOps.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 47,
"end_line": 48,
"start_col": 0,
"start_line": 48
} | module Spec.K256.PointOps
open FStar.Mul
open Lib.IntTypes
open Lib.Sequence
module M = Lib.NatMod
module BSeq = Lib.ByteSequence
#set-options "--z3rlimit 30 --fuel 0 --ifuel 0"
/// Finite field
let prime : (p:pos{p = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F}) =
assert_norm (24 < pow2 256 - 0x1000003D1);
assert_norm (pow2 256 - 0x1000003D1 = pow2 256 - pow2 32 - pow2 9 - pow2 8 - pow2 7 - pow2 6 - pow2 4 - 1);
assert_norm (pow2 256 - 0x1000003D1 = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F);
pow2 256 - 0x1000003D1
let felem = x:nat{x < prime}
let zero : felem = 0
let one : felem = 1
let fadd (x y:felem) : felem = (x + y) % prime
let fsub (x y:felem) : felem = (x - y) % prime
let fmul (x y:felem) : felem = (x * y) % prime
let finv (x:felem) : felem = M.pow_mod #prime x (prime - 2)
let fsqrt (x:felem) : felem = M.pow_mod #prime x ((prime + 1) / 4)
let is_fodd (x:nat) : bool = x % 2 = 1
let ( +% ) = fadd
let ( -% ) = fsub
let ( *% ) = fmul
let ( /% ) (x y:felem) = x *% finv y
/// Scalar field
// Group order
let q : q:pos{q < pow2 256} =
assert_norm (0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141 < pow2 256);
0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141
let qelem = x:nat{x < q}
let qadd (x y:qelem) : qelem = (x + y) % q
let qmul (x y:qelem) : qelem = (x * y) % q
let qinv (x:qelem) : qelem = M.pow_mod #q x (q - 2) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.RawIntTypes.fsti.checked",
"Lib.NatMod.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.K256.PointOps.fst"
} | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": true,
"full_module": "Lib.NatMod",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Lib.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x: Spec.K256.PointOps.qelem -> Prims.bool | Prims.Tot | [
"total"
] | [] | [
"Spec.K256.PointOps.qelem",
"Prims.op_GreaterThan",
"Prims.op_Division",
"Spec.K256.PointOps.q",
"Prims.bool"
] | [] | false | false | false | true | false | let scalar_is_high (x: qelem) : bool =
| x > q / 2 | false |
Spec.K256.PointOps.fst | Spec.K256.PointOps.aff_point_at_inf | val aff_point_at_inf:aff_point | val aff_point_at_inf:aff_point | let aff_point_at_inf : aff_point = (zero, zero) | {
"file_name": "specs/Spec.K256.PointOps.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 47,
"end_line": 64,
"start_col": 0,
"start_line": 64
} | module Spec.K256.PointOps
open FStar.Mul
open Lib.IntTypes
open Lib.Sequence
module M = Lib.NatMod
module BSeq = Lib.ByteSequence
#set-options "--z3rlimit 30 --fuel 0 --ifuel 0"
/// Finite field
let prime : (p:pos{p = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F}) =
assert_norm (24 < pow2 256 - 0x1000003D1);
assert_norm (pow2 256 - 0x1000003D1 = pow2 256 - pow2 32 - pow2 9 - pow2 8 - pow2 7 - pow2 6 - pow2 4 - 1);
assert_norm (pow2 256 - 0x1000003D1 = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F);
pow2 256 - 0x1000003D1
let felem = x:nat{x < prime}
let zero : felem = 0
let one : felem = 1
let fadd (x y:felem) : felem = (x + y) % prime
let fsub (x y:felem) : felem = (x - y) % prime
let fmul (x y:felem) : felem = (x * y) % prime
let finv (x:felem) : felem = M.pow_mod #prime x (prime - 2)
let fsqrt (x:felem) : felem = M.pow_mod #prime x ((prime + 1) / 4)
let is_fodd (x:nat) : bool = x % 2 = 1
let ( +% ) = fadd
let ( -% ) = fsub
let ( *% ) = fmul
let ( /% ) (x y:felem) = x *% finv y
/// Scalar field
// Group order
let q : q:pos{q < pow2 256} =
assert_norm (0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141 < pow2 256);
0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141
let qelem = x:nat{x < q}
let qadd (x y:qelem) : qelem = (x + y) % q
let qmul (x y:qelem) : qelem = (x * y) % q
let qinv (x:qelem) : qelem = M.pow_mod #q x (q - 2)
let qnegate (x:qelem) : qelem = (- x) % q
let scalar_is_high (x:qelem) : bool = x > q / 2
let ( +^ ) = qadd
let ( *^ ) = qmul
/// Elliptic curve
let aff_point = felem & felem // Affine point
let proj_point = felem & felem & felem // Projective coordinates
// y * y = x * x * x + b
let b : felem = 7
let is_on_curve (p:aff_point) =
let x, y = p in y *% y = x *% x *% x +% b | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.RawIntTypes.fsti.checked",
"Lib.NatMod.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.K256.PointOps.fst"
} | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": true,
"full_module": "Lib.NatMod",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Lib.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | Spec.K256.PointOps.aff_point | Prims.Tot | [
"total"
] | [] | [
"FStar.Pervasives.Native.Mktuple2",
"Spec.K256.PointOps.felem",
"Spec.K256.PointOps.zero"
] | [] | false | false | false | true | false | let aff_point_at_inf:aff_point =
| (zero, zero) | false |
Spec.K256.PointOps.fst | Spec.K256.PointOps.point_at_inf | val point_at_inf:proj_point | val point_at_inf:proj_point | let point_at_inf : proj_point = (zero, one, zero) | {
"file_name": "specs/Spec.K256.PointOps.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 49,
"end_line": 65,
"start_col": 0,
"start_line": 65
} | module Spec.K256.PointOps
open FStar.Mul
open Lib.IntTypes
open Lib.Sequence
module M = Lib.NatMod
module BSeq = Lib.ByteSequence
#set-options "--z3rlimit 30 --fuel 0 --ifuel 0"
/// Finite field
let prime : (p:pos{p = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F}) =
assert_norm (24 < pow2 256 - 0x1000003D1);
assert_norm (pow2 256 - 0x1000003D1 = pow2 256 - pow2 32 - pow2 9 - pow2 8 - pow2 7 - pow2 6 - pow2 4 - 1);
assert_norm (pow2 256 - 0x1000003D1 = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F);
pow2 256 - 0x1000003D1
let felem = x:nat{x < prime}
let zero : felem = 0
let one : felem = 1
let fadd (x y:felem) : felem = (x + y) % prime
let fsub (x y:felem) : felem = (x - y) % prime
let fmul (x y:felem) : felem = (x * y) % prime
let finv (x:felem) : felem = M.pow_mod #prime x (prime - 2)
let fsqrt (x:felem) : felem = M.pow_mod #prime x ((prime + 1) / 4)
let is_fodd (x:nat) : bool = x % 2 = 1
let ( +% ) = fadd
let ( -% ) = fsub
let ( *% ) = fmul
let ( /% ) (x y:felem) = x *% finv y
/// Scalar field
// Group order
let q : q:pos{q < pow2 256} =
assert_norm (0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141 < pow2 256);
0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141
let qelem = x:nat{x < q}
let qadd (x y:qelem) : qelem = (x + y) % q
let qmul (x y:qelem) : qelem = (x * y) % q
let qinv (x:qelem) : qelem = M.pow_mod #q x (q - 2)
let qnegate (x:qelem) : qelem = (- x) % q
let scalar_is_high (x:qelem) : bool = x > q / 2
let ( +^ ) = qadd
let ( *^ ) = qmul
/// Elliptic curve
let aff_point = felem & felem // Affine point
let proj_point = felem & felem & felem // Projective coordinates
// y * y = x * x * x + b
let b : felem = 7
let is_on_curve (p:aff_point) =
let x, y = p in y *% y = x *% x *% x +% b | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.RawIntTypes.fsti.checked",
"Lib.NatMod.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.K256.PointOps.fst"
} | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": true,
"full_module": "Lib.NatMod",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Lib.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | Spec.K256.PointOps.proj_point | Prims.Tot | [
"total"
] | [] | [
"FStar.Pervasives.Native.Mktuple3",
"Spec.K256.PointOps.felem",
"Spec.K256.PointOps.zero",
"Spec.K256.PointOps.one"
] | [] | false | false | false | true | false | let point_at_inf:proj_point =
| (zero, one, zero) | false |
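The two records above fix how the group identity is encoded: (zero, zero) as an affine placeholder and (zero, one, zero) in projective coordinates. As the "not on the curve!" comment in the file context points out, the affine encoding does not satisfy y * y = x * x * x + b. A minimal Python sketch (illustration only, not hacl-star code; the prime and b are copied from the file context above) mirrors is_on_curve and confirms this:

    # Python mirror of the spec's curve membership test (illustration only).
    p = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F
    b = 7

    def is_on_curve(x, y):
        # y * y = x * x * x + b, computed modulo the field prime
        return (y * y) % p == (x * x * x + b) % p

    aff_point_at_inf = (0, 0)       # affine placeholder for the identity
    point_at_inf = (0, 1, 0)        # canonical projective representative

    # The affine placeholder is deliberately off the curve: 0 != 7 (mod p).
    assert not is_on_curve(*aff_point_at_inf)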
Spec.Agile.Hash.fsti | Spec.Agile.Hash.hash | val hash : a: Spec.Hash.Definitions.fixed_len_alg ->
input:
Spec.Hash.Definitions.bytes
{Spec.Hash.Definitions.less_than_max_input_length (FStar.Seq.Base.length input) a}
-> Lib.ByteSequence.lbytes (Spec.Hash.Definitions.hash_length' a ()) | let hash (a:fixed_len_alg) (input:bytes{S.length input `less_than_max_input_length` a}) =
hash' a input () | {
"file_name": "specs/Spec.Agile.Hash.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 18,
"end_line": 93,
"start_col": 0,
"start_line": 92
} | module Spec.Agile.Hash
module S = FStar.Seq
include Spec.Hash.Definitions
open FStar.Mul
(** Hashes, agility, incrementality, streaming, and hash laws.
For various historical reasons, this module serves two purposes.
- For Merkle-Damgård algorithms (MD5, SHA1, SHA2), this module acts as a
*definitional* specification. This *is* the spec of the MD algorithms, and
low-level implementations (at least, historically) were shown to refine this
specification.
- For non-MD algorithms (Blake2, SHA3), this module serves a different purpose:
it shows that Blake2 and SHA3 obey the hash laws (more on that below), and
that therefore they can be suitably interpreted as behaving like hash
algorithms in this agile specification. The agile hash therefore obeys the
hash laws, because every algorithm does.
This agile specification, in addition to establishing the high-level property that
"all hash algorithms behave like hashes" (i.e., obey the hash laws), serves as
a specification of the agile, multiplexing hash known as EverCrypt.Hash.
The hash laws are as follows.
- Any hash algorithm can be decomposed into an *incremental* specification,
relying on: init, update_multi, update_last, finish. (The MD hashes
specifically decompose update_last as padding + update but this is not
generally true of all hashes.) See
Spec.Hash.Incremental.Definitions.hash_incremental, along with the various
proofs in Spec.Hash.Incremental.X that algorithm X is equivalent to its
incremental specification.
- The update_multi function, which processes n full blocks into the internal
hash state (also known as the accumulator, borrowing from functional
programming terminology for folds), takes the empty input as its neutral element.
Concretely:
update_multi acc empty == acc
- The update_multi function is associative. Concretely:
update_multi (update_multi acc blocks) blocks' == update_multi acc (blocks @ blocks')
Proving the three hash laws is important: they are needed by the streaming
functor (which turns a block-by-block implementation into a buffered
implementation that can take arbitrary amounts of data) for functional
correctness.
(In the case of MD hashes, the proof of incrementality specifically relies on
the two properties of update_multi, but this is not true in the general case.)
The incremental specification (in lemmas/Spec.Hash.Incremental.Definitions)
introduces a notion of "update_last" and then defines the hash as update_multi,
update_last, finish.
*)
val init (a:hash_alg): init_t a
let init_extra_state (a:hash_alg) : extra_state a = match a with
| Blake2B | Blake2S -> 0
| _ -> ()
(* The individual update function. This is an implementation detail, and clients
should reason in terms of update_multi to be fully agile. None of the hash laws
refer to update. *)
val update (a:md_alg): update_t a
(* Because of blake2, we unfortunately have this precondition creeping up. *)
let update_multi_pre
(a:hash_alg)
(prev:extra_state a)
(blocks:bytes)
=
match a with
| Blake2B | Blake2S ->
(S.length blocks + prev) `less_than_max_input_length` a
| _ -> true
(* Agile multi-block processing function shown to obey the hash laws. *)
val update_multi
(a:hash_alg)
(hash:words_state a)
(prev:extra_state a)
(blocks:bytes_blocks a):
Pure (words_state a)
(requires update_multi_pre a prev blocks)
(ensures fun _ -> True)
val finish (a:hash_alg): Spec.Hash.Definitions.finish_t a
val hash' (a:hash_alg) (input:bytes{S.length input `less_than_max_input_length` a}) (l: output_length a):
Tot (Lib.ByteSequence.lbytes (Spec.Hash.Definitions.hash_length' a l)) | {
"checked_file": "/",
"dependencies": [
"Spec.Hash.Definitions.fst.checked",
"prims.fst.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.Agile.Hash.fsti"
} | [
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Hash.MD",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Hash.Definitions",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Hash.Definitions",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Spec.Agile",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Agile",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
a: Spec.Hash.Definitions.fixed_len_alg ->
input:
Spec.Hash.Definitions.bytes
{Spec.Hash.Definitions.less_than_max_input_length (FStar.Seq.Base.length input) a}
-> Lib.ByteSequence.lbytes (Spec.Hash.Definitions.hash_length' a ()) | Prims.Tot | [
"total"
] | [] | [
"Spec.Hash.Definitions.fixed_len_alg",
"Spec.Hash.Definitions.bytes",
"Prims.b2t",
"Spec.Hash.Definitions.less_than_max_input_length",
"FStar.Seq.Base.length",
"Lib.IntTypes.uint8",
"Spec.Agile.Hash.hash'",
"Lib.ByteSequence.lbytes",
"Spec.Hash.Definitions.hash_length'"
] | [] | false | false | false | false | false | let hash (a: fixed_len_alg) (input: bytes{(S.length input) `less_than_max_input_length` a}) =
| hash' a input () | false |
|
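The module comment embedded in this record states the two update_multi laws: the empty input acts as a neutral element, and processing concatenated full-block inputs composes. The Python toy below (not the hacl-star specification; the per-block mixing step is an arbitrary stand-in for a real compression function) shows that any block-by-block left fold satisfies exactly these equations, which is the shape of reasoning the streaming functor relies on:

    # Toy illustration of the update_multi laws (the mixing step is made up).
    BLOCK = 64

    def update(acc, block):
        return (acc * 31 + sum(block) + len(block)) % 2**64

    def update_multi(acc, blocks):
        assert len(blocks) % BLOCK == 0
        for i in range(0, len(blocks), BLOCK):
            acc = update(acc, blocks[i:i + BLOCK])
        return acc

    acc0 = 42
    blocks1 = bytes(range(64)) * 2      # two full blocks
    blocks2 = bytes(64)                 # one full block of zeros

    # Law 1: the empty input is a neutral element.
    assert update_multi(acc0, b"") == acc0
    # Law 2: folding block sequences one after the other equals folding their concatenation.
    assert (update_multi(update_multi(acc0, blocks1), blocks2)
            == update_multi(acc0, blocks1 + blocks2))

The real specification replaces the toy step with the algorithm's compression function, but the two equations checked here are exactly the hash laws named in the comment.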
Spec.K256.PointOps.fst | Spec.K256.PointOps.to_proj_point | val to_proj_point (p: aff_point) : proj_point | val to_proj_point (p: aff_point) : proj_point | let to_proj_point (p:aff_point) : proj_point =
let (x, y) = p in (x, y, one) | {
"file_name": "specs/Spec.K256.PointOps.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 31,
"end_line": 82,
"start_col": 0,
"start_line": 81
} | module Spec.K256.PointOps
open FStar.Mul
open Lib.IntTypes
open Lib.Sequence
module M = Lib.NatMod
module BSeq = Lib.ByteSequence
#set-options "--z3rlimit 30 --fuel 0 --ifuel 0"
/// Finite field
let prime : (p:pos{p = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F}) =
assert_norm (24 < pow2 256 - 0x1000003D1);
assert_norm (pow2 256 - 0x1000003D1 = pow2 256 - pow2 32 - pow2 9 - pow2 8 - pow2 7 - pow2 6 - pow2 4 - 1);
assert_norm (pow2 256 - 0x1000003D1 = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F);
pow2 256 - 0x1000003D1
let felem = x:nat{x < prime}
let zero : felem = 0
let one : felem = 1
let fadd (x y:felem) : felem = (x + y) % prime
let fsub (x y:felem) : felem = (x - y) % prime
let fmul (x y:felem) : felem = (x * y) % prime
let finv (x:felem) : felem = M.pow_mod #prime x (prime - 2)
let fsqrt (x:felem) : felem = M.pow_mod #prime x ((prime + 1) / 4)
let is_fodd (x:nat) : bool = x % 2 = 1
let ( +% ) = fadd
let ( -% ) = fsub
let ( *% ) = fmul
let ( /% ) (x y:felem) = x *% finv y
/// Scalar field
// Group order
let q : q:pos{q < pow2 256} =
assert_norm (0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141 < pow2 256);
0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141
let qelem = x:nat{x < q}
let qadd (x y:qelem) : qelem = (x + y) % q
let qmul (x y:qelem) : qelem = (x * y) % q
let qinv (x:qelem) : qelem = M.pow_mod #q x (q - 2)
let qnegate (x:qelem) : qelem = (- x) % q
let scalar_is_high (x:qelem) : bool = x > q / 2
let ( +^ ) = qadd
let ( *^ ) = qmul
/// Elliptic curve
let aff_point = felem & felem // Affine point
let proj_point = felem & felem & felem // Projective coordinates
// y * y = x * x * x + b
let b : felem = 7
let is_on_curve (p:aff_point) =
let x, y = p in y *% y = x *% x *% x +% b
let aff_point_at_inf : aff_point = (zero, zero) // not on the curve!
let point_at_inf : proj_point = (zero, one, zero)
let is_aff_point_at_inf (p:aff_point) : bool =
let (x, y) = p in x = zero && y = zero
let is_proj_point_at_inf (p:proj_point) : bool =
let (_, _, z) = p in z = zero
let to_aff_point (p:proj_point) : aff_point =
// if is_proj_point_at_inf p then aff_point_at_inf
let (px, py, pz) = p in
let zinv = finv pz in
let x = px *% zinv in
let y = py *% zinv in
(x, y) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.RawIntTypes.fsti.checked",
"Lib.NatMod.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.K256.PointOps.fst"
} | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": true,
"full_module": "Lib.NatMod",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Lib.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | p: Spec.K256.PointOps.aff_point -> Spec.K256.PointOps.proj_point | Prims.Tot | [
"total"
] | [] | [
"Spec.K256.PointOps.aff_point",
"Spec.K256.PointOps.felem",
"FStar.Pervasives.Native.Mktuple3",
"Spec.K256.PointOps.one",
"Spec.K256.PointOps.proj_point"
] | [] | false | false | false | true | false | let to_proj_point (p: aff_point) : proj_point =
| let x, y = p in
(x, y, one) | false |
Spec.K256.PointOps.fst | Spec.K256.PointOps.zero | val zero:felem | val zero:felem | let zero : felem = 0 | {
"file_name": "specs/Spec.K256.PointOps.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 20,
"end_line": 21,
"start_col": 0,
"start_line": 21
} | module Spec.K256.PointOps
open FStar.Mul
open Lib.IntTypes
open Lib.Sequence
module M = Lib.NatMod
module BSeq = Lib.ByteSequence
#set-options "--z3rlimit 30 --fuel 0 --ifuel 0"
/// Finite field
let prime : (p:pos{p = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F}) =
assert_norm (24 < pow2 256 - 0x1000003D1);
assert_norm (pow2 256 - 0x1000003D1 = pow2 256 - pow2 32 - pow2 9 - pow2 8 - pow2 7 - pow2 6 - pow2 4 - 1);
assert_norm (pow2 256 - 0x1000003D1 = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F);
pow2 256 - 0x1000003D1 | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.RawIntTypes.fsti.checked",
"Lib.NatMod.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.K256.PointOps.fst"
} | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": true,
"full_module": "Lib.NatMod",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Lib.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | Spec.K256.PointOps.felem | Prims.Tot | [
"total"
] | [] | [] | [] | false | false | false | true | false | let zero:felem =
| 0 | false |
Spec.Agile.Hash.fsti | Spec.Agile.Hash.update_multi_pre | val update_multi_pre : a: Spec.Hash.Definitions.hash_alg ->
prev: Spec.Hash.Definitions.extra_state a ->
blocks: Spec.Hash.Definitions.bytes
-> Prims.bool | let update_multi_pre
(a:hash_alg)
(prev:extra_state a)
(blocks:bytes)
=
match a with
| Blake2B | Blake2S ->
(S.length blocks + prev) `less_than_max_input_length` a
| _ -> true | {
"file_name": "specs/Spec.Agile.Hash.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 13,
"end_line": 74,
"start_col": 0,
"start_line": 66
} | module Spec.Agile.Hash
module S = FStar.Seq
include Spec.Hash.Definitions
open FStar.Mul
(** Hashes, agility, incrementality, streaming, and hash laws.
For various historical reasons, this module serves two purposes.
- For Merkle-Damgård algorithms (MD5, SHA1, SHA2), this module acts as a
*definitional* specification. This *is* the spec of the MD algorithms, and
low-level implementations (at least, historically) were shown to refine this
specification.
- For non-MD algorithms (Blake2, SHA3), this module serves a different purpose:
it shows that Blake2 and SHA3 obey the hash laws (more on that below), and
that therefore they can be suitably interpreted as behaving like hash
algorithms in this agile specification. The agile hash therefore obeys the
hash laws, because every algorithm does.
This agile specification, in addition to establishing the high-level property that
"all hash algorithms behave like hashes" (i.e., obey the hash laws), serves as
a specification of the agile, multiplexing hash known as EverCrypt.Hash.
The hash laws are as follows.
- Any hash algorithm can be decomposed into an *incremental* specification,
relying on: init, update_multi, update_last, finish. (The MD hashes
specifically decompose update_last as padding + update but this is not
generally true of all hashes.) See
Spec.Hash.Incremental.Definitions.hash_incremental, along with the various
proofs in Spec.Hash.Incremental.X that algorithm X is equivalent to its
incremental specification.
- The update_multi function, which processes n full blocks into the internal
hash state (also known as the accumulator, borrowing from functional
programming terminology for folds), takes the empty input as its neutral element.
Concretely:
update_multi acc empty == acc
- The update_multi function is associative. Concretely:
update_multi (update_multi acc blocks) blocks' == update_multi acc (blocks @ blocks')
Proving the three hash laws is important: they are needed by the streaming
functor (which turns a block-by-block implementation into a buffered
implementation that can take arbitrary amounts of data) for functional
correctness.
(In the case of MD hashes, the proof of incrementality specifically relies on
the two properties of update_multi, but this is not true in the general case.)
The incremental specification (in lemmas/Spec.Hash.Incremental.Definitions)
introduces a notion of "update_last" and then defines the hash as update_multi,
update_last, finish.
*)
val init (a:hash_alg): init_t a
let init_extra_state (a:hash_alg) : extra_state a = match a with
| Blake2B | Blake2S -> 0
| _ -> ()
(* The individual update function. This is an implementation detail, and clients
should reason in terms of update_multi to be fully agile. None of the hash laws
refer to update. *)
val update (a:md_alg): update_t a | {
"checked_file": "/",
"dependencies": [
"Spec.Hash.Definitions.fst.checked",
"prims.fst.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.Agile.Hash.fsti"
} | [
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Hash.MD",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Hash.Definitions",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Hash.Definitions",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Spec.Agile",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Agile",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
a: Spec.Hash.Definitions.hash_alg ->
prev: Spec.Hash.Definitions.extra_state a ->
blocks: Spec.Hash.Definitions.bytes
-> Prims.bool | Prims.Tot | [
"total"
] | [] | [
"Spec.Hash.Definitions.hash_alg",
"Spec.Hash.Definitions.extra_state",
"Spec.Hash.Definitions.bytes",
"Spec.Hash.Definitions.less_than_max_input_length",
"Prims.op_Addition",
"FStar.Seq.Base.length",
"Lib.IntTypes.uint8",
"Prims.bool"
] | [] | false | false | false | false | false | let update_multi_pre (a: hash_alg) (prev: extra_state a) (blocks: bytes) =
| match a with
| Blake2B | Blake2S -> (S.length blocks + prev) `less_than_max_input_length` a
| _ -> true | false |
|
Spec.K256.PointOps.fst | Spec.K256.PointOps.to_aff_point | val to_aff_point (p: proj_point) : aff_point | val to_aff_point (p: proj_point) : aff_point | let to_aff_point (p:proj_point) : aff_point =
// if is_proj_point_at_inf p then aff_point_at_inf
let (px, py, pz) = p in
let zinv = finv pz in
let x = px *% zinv in
let y = py *% zinv in
(x, y) | {
"file_name": "specs/Spec.K256.PointOps.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 8,
"end_line": 79,
"start_col": 0,
"start_line": 73
} | module Spec.K256.PointOps
open FStar.Mul
open Lib.IntTypes
open Lib.Sequence
module M = Lib.NatMod
module BSeq = Lib.ByteSequence
#set-options "--z3rlimit 30 --fuel 0 --ifuel 0"
/// Finite field
let prime : (p:pos{p = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F}) =
assert_norm (24 < pow2 256 - 0x1000003D1);
assert_norm (pow2 256 - 0x1000003D1 = pow2 256 - pow2 32 - pow2 9 - pow2 8 - pow2 7 - pow2 6 - pow2 4 - 1);
assert_norm (pow2 256 - 0x1000003D1 = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F);
pow2 256 - 0x1000003D1
let felem = x:nat{x < prime}
let zero : felem = 0
let one : felem = 1
let fadd (x y:felem) : felem = (x + y) % prime
let fsub (x y:felem) : felem = (x - y) % prime
let fmul (x y:felem) : felem = (x * y) % prime
let finv (x:felem) : felem = M.pow_mod #prime x (prime - 2)
let fsqrt (x:felem) : felem = M.pow_mod #prime x ((prime + 1) / 4)
let is_fodd (x:nat) : bool = x % 2 = 1
let ( +% ) = fadd
let ( -% ) = fsub
let ( *% ) = fmul
let ( /% ) (x y:felem) = x *% finv y
/// Scalar field
// Group order
let q : q:pos{q < pow2 256} =
assert_norm (0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141 < pow2 256);
0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141
let qelem = x:nat{x < q}
let qadd (x y:qelem) : qelem = (x + y) % q
let qmul (x y:qelem) : qelem = (x * y) % q
let qinv (x:qelem) : qelem = M.pow_mod #q x (q - 2)
let qnegate (x:qelem) : qelem = (- x) % q
let scalar_is_high (x:qelem) : bool = x > q / 2
let ( +^ ) = qadd
let ( *^ ) = qmul
/// Elliptic curve
let aff_point = felem & felem // Affine point
let proj_point = felem & felem & felem // Projective coordinates
// y * y = x * x * x + b
let b : felem = 7
let is_on_curve (p:aff_point) =
let x, y = p in y *% y = x *% x *% x +% b
let aff_point_at_inf : aff_point = (zero, zero) // not on the curve!
let point_at_inf : proj_point = (zero, one, zero)
let is_aff_point_at_inf (p:aff_point) : bool =
let (x, y) = p in x = zero && y = zero
let is_proj_point_at_inf (p:proj_point) : bool =
let (_, _, z) = p in z = zero | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.RawIntTypes.fsti.checked",
"Lib.NatMod.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.K256.PointOps.fst"
} | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": true,
"full_module": "Lib.NatMod",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Lib.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | p: Spec.K256.PointOps.proj_point -> Spec.K256.PointOps.aff_point | Prims.Tot | [
"total"
] | [] | [
"Spec.K256.PointOps.proj_point",
"Spec.K256.PointOps.felem",
"FStar.Pervasives.Native.Mktuple2",
"Spec.K256.PointOps.op_Star_Percent",
"Spec.K256.PointOps.finv",
"Spec.K256.PointOps.aff_point"
] | [] | false | false | false | true | false | let to_aff_point (p: proj_point) : aff_point =
| let px, py, pz = p in
let zinv = finv pz in
let x = px *% zinv in
let y = py *% zinv in
(x, y) | false |
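to_aff_point divides both coordinates by Z via finv, while to_proj_point (shown in this record's file context) simply sets Z to one, so the two maps cancel on affine inputs. The Python mirror below (illustration only, using plain integers for field elements) checks the round trip; it also shows that, with finv defined as x^(p-2), finv 0 evaluates to 0, so the projective identity (0, 1, 0) is mapped to the affine placeholder (0, 0) even without the commented-out special case:

    # Python mirror of to_proj_point / to_aff_point (illustration only).
    p = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F

    def finv(x):
        return pow(x, p - 2, p)              # Fermat inverse, as in the spec

    def to_proj_point(pt):
        x, y = pt
        return (x, y, 1)

    def to_aff_point(pt):
        px, py, pz = pt
        zinv = finv(pz)
        return ((px * zinv) % p, (py * zinv) % p)

    # Round trip on an arbitrary affine pair: finv 1 = 1, so nothing changes.
    pt = (123456789, 987654321)
    assert to_aff_point(to_proj_point(pt)) == pt

    # The projective identity lands on the affine placeholder, since finv 0 = 0.
    assert to_aff_point((0, 1, 0)) == (0, 0)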
Spec.K256.PointOps.fst | Spec.K256.PointOps.fadd | val fadd (x y: felem) : felem | val fadd (x y: felem) : felem | let fadd (x y:felem) : felem = (x + y) % prime | {
"file_name": "specs/Spec.K256.PointOps.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 46,
"end_line": 24,
"start_col": 0,
"start_line": 24
} | module Spec.K256.PointOps
open FStar.Mul
open Lib.IntTypes
open Lib.Sequence
module M = Lib.NatMod
module BSeq = Lib.ByteSequence
#set-options "--z3rlimit 30 --fuel 0 --ifuel 0"
/// Finite field
let prime : (p:pos{p = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F}) =
assert_norm (24 < pow2 256 - 0x1000003D1);
assert_norm (pow2 256 - 0x1000003D1 = pow2 256 - pow2 32 - pow2 9 - pow2 8 - pow2 7 - pow2 6 - pow2 4 - 1);
assert_norm (pow2 256 - 0x1000003D1 = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F);
pow2 256 - 0x1000003D1
let felem = x:nat{x < prime}
let zero : felem = 0
let one : felem = 1 | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.RawIntTypes.fsti.checked",
"Lib.NatMod.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.K256.PointOps.fst"
} | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": true,
"full_module": "Lib.NatMod",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Lib.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x: Spec.K256.PointOps.felem -> y: Spec.K256.PointOps.felem -> Spec.K256.PointOps.felem | Prims.Tot | [
"total"
] | [] | [
"Spec.K256.PointOps.felem",
"Prims.op_Modulus",
"Prims.op_Addition",
"Spec.K256.PointOps.prime"
] | [] | false | false | false | true | false | let fadd (x y: felem) : felem =
| (x + y) % prime | false |
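fadd (and its companions fsub and fmul, visible in the file context) is plain modular arithmetic on naturals below the prime, so results wrap around instead of growing. A quick Python spot-check (illustration only; the prime is copied from the file context above):

    # Wrap-around behaviour of the field operations (illustration only).
    p = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F

    fadd = lambda x, y: (x + y) % p
    fsub = lambda x, y: (x - y) % p
    fmul = lambda x, y: (x * y) % p

    assert fadd(p - 1, 1) == 0          # largest element plus one wraps to zero
    assert fsub(0, 1) == p - 1          # subtraction wraps the other way
    assert fmul(p - 1, p - 1) == 1      # (-1) * (-1) = 1 in the field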
Spec.K256.PointOps.fst | Spec.K256.PointOps.g | val g:proj_point | val g:proj_point | let g : proj_point = (g_x, g_y, one) | {
"file_name": "specs/Spec.K256.PointOps.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 36,
"end_line": 87,
"start_col": 0,
"start_line": 87
} | module Spec.K256.PointOps
open FStar.Mul
open Lib.IntTypes
open Lib.Sequence
module M = Lib.NatMod
module BSeq = Lib.ByteSequence
#set-options "--z3rlimit 30 --fuel 0 --ifuel 0"
/// Finite field
let prime : (p:pos{p = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F}) =
assert_norm (24 < pow2 256 - 0x1000003D1);
assert_norm (pow2 256 - 0x1000003D1 = pow2 256 - pow2 32 - pow2 9 - pow2 8 - pow2 7 - pow2 6 - pow2 4 - 1);
assert_norm (pow2 256 - 0x1000003D1 = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F);
pow2 256 - 0x1000003D1
let felem = x:nat{x < prime}
let zero : felem = 0
let one : felem = 1
let fadd (x y:felem) : felem = (x + y) % prime
let fsub (x y:felem) : felem = (x - y) % prime
let fmul (x y:felem) : felem = (x * y) % prime
let finv (x:felem) : felem = M.pow_mod #prime x (prime - 2)
let fsqrt (x:felem) : felem = M.pow_mod #prime x ((prime + 1) / 4)
let is_fodd (x:nat) : bool = x % 2 = 1
let ( +% ) = fadd
let ( -% ) = fsub
let ( *% ) = fmul
let ( /% ) (x y:felem) = x *% finv y
/// Scalar field
// Group order
let q : q:pos{q < pow2 256} =
assert_norm (0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141 < pow2 256);
0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141
let qelem = x:nat{x < q}
let qadd (x y:qelem) : qelem = (x + y) % q
let qmul (x y:qelem) : qelem = (x * y) % q
let qinv (x:qelem) : qelem = M.pow_mod #q x (q - 2)
let qnegate (x:qelem) : qelem = (- x) % q
let scalar_is_high (x:qelem) : bool = x > q / 2
let ( +^ ) = qadd
let ( *^ ) = qmul
/// Elliptic curve
let aff_point = felem & felem // Affine point
let proj_point = felem & felem & felem // Projective coordinates
// y * y = x * x * x + b
let b : felem = 7
let is_on_curve (p:aff_point) =
let x, y = p in y *% y = x *% x *% x +% b
let aff_point_at_inf : aff_point = (zero, zero) // not on the curve!
let point_at_inf : proj_point = (zero, one, zero)
let is_aff_point_at_inf (p:aff_point) : bool =
let (x, y) = p in x = zero && y = zero
let is_proj_point_at_inf (p:proj_point) : bool =
let (_, _, z) = p in z = zero
let to_aff_point (p:proj_point) : aff_point =
// if is_proj_point_at_inf p then aff_point_at_inf
let (px, py, pz) = p in
let zinv = finv pz in
let x = px *% zinv in
let y = py *% zinv in
(x, y)
let to_proj_point (p:aff_point) : proj_point =
let (x, y) = p in (x, y, one)
// Base point
let g_x : felem = 0x79be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798 | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.RawIntTypes.fsti.checked",
"Lib.NatMod.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.K256.PointOps.fst"
} | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": true,
"full_module": "Lib.NatMod",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Lib.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | Spec.K256.PointOps.proj_point | Prims.Tot | [
"total"
] | [] | [
"FStar.Pervasives.Native.Mktuple3",
"Spec.K256.PointOps.felem",
"Spec.K256.PointOps.g_x",
"Spec.K256.PointOps.g_y",
"Spec.K256.PointOps.one"
] | [] | false | false | false | true | false | let g:proj_point =
| (g_x, g_y, one) | false |
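The file context for this record shows g_x but is cut just before g_y, so the y-coordinate below is supplied from the SEC 2 secp256k1 parameters rather than from the excerpt; under that assumption, a one-line Python check confirms that the base point g = (g_x, g_y, one) satisfies the curve equation used by is_on_curve:

    # Base point sanity check (illustration only).
    # g_x is copied from the file context above; g_y is the standard SEC 2 value,
    # which is not shown in this excerpt.
    p   = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F
    g_x = 0x79be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798
    g_y = 0x483ada7726a3c4655da4fbfc0e1108a8fd17b448a68554199c47d08ffb10d4b8

    assert (g_y * g_y) % p == (g_x * g_x * g_x + 7) % p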
Spec.K256.PointOps.fst | Spec.K256.PointOps.finv | val finv (x: felem) : felem | val finv (x: felem) : felem | let finv (x:felem) : felem = M.pow_mod #prime x (prime - 2) | {
"file_name": "specs/Spec.K256.PointOps.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 59,
"end_line": 27,
"start_col": 0,
"start_line": 27
} | module Spec.K256.PointOps
open FStar.Mul
open Lib.IntTypes
open Lib.Sequence
module M = Lib.NatMod
module BSeq = Lib.ByteSequence
#set-options "--z3rlimit 30 --fuel 0 --ifuel 0"
/// Finite field
let prime : (p:pos{p = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F}) =
assert_norm (24 < pow2 256 - 0x1000003D1);
assert_norm (pow2 256 - 0x1000003D1 = pow2 256 - pow2 32 - pow2 9 - pow2 8 - pow2 7 - pow2 6 - pow2 4 - 1);
assert_norm (pow2 256 - 0x1000003D1 = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F);
pow2 256 - 0x1000003D1
let felem = x:nat{x < prime}
let zero : felem = 0
let one : felem = 1
let fadd (x y:felem) : felem = (x + y) % prime
let fsub (x y:felem) : felem = (x - y) % prime | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.RawIntTypes.fsti.checked",
"Lib.NatMod.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.K256.PointOps.fst"
} | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": true,
"full_module": "Lib.NatMod",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Lib.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x: Spec.K256.PointOps.felem -> Spec.K256.PointOps.felem | Prims.Tot | [
"total"
] | [] | [
"Spec.K256.PointOps.felem",
"Lib.NatMod.pow_mod",
"Spec.K256.PointOps.prime",
"Prims.op_Subtraction"
] | [] | false | false | false | true | false | let finv (x: felem) : felem =
| M.pow_mod #prime x (prime - 2) | false |
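finv raises its argument to the power p - 2 (Fermat), and the same file context defines fsqrt as raising to (p + 1) / 4, a square-root formula that is valid precisely because this prime is congruent to 3 modulo 4. Both facts are easy to spot-check with Python's built-in modular exponentiation (illustration only):

    # Fermat inverse and the p = 3 (mod 4) square-root trick (illustration only).
    p = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F
    assert p % 4 == 3                      # prerequisite for the fsqrt formula

    finv  = lambda x: pow(x, p - 2, p)
    fsqrt = lambda x: pow(x, (p + 1) // 4, p)

    x = 0xdeadbeef
    assert (x * finv(x)) % p == 1          # x * x^(p-2) = x^(p-1) = 1 (Fermat)

    a = (x * x) % p                        # a known square
    assert fsqrt(a) in (x, p - x)          # fsqrt returns one of the two roots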
Spec.Agile.Hash.fsti | Spec.Agile.Hash.init_extra_state | val init_extra_state (a: hash_alg) : extra_state a | val init_extra_state (a: hash_alg) : extra_state a | let init_extra_state (a:hash_alg) : extra_state a = match a with
| Blake2B | Blake2S -> 0
| _ -> () | {
"file_name": "specs/Spec.Agile.Hash.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 11,
"end_line": 58,
"start_col": 0,
"start_line": 56
} | module Spec.Agile.Hash
module S = FStar.Seq
include Spec.Hash.Definitions
open FStar.Mul
(** Hashes, agility, incrementality, streaming, and hash laws.
For various historical reasons, this module serves two purposes.
- For Merkle-Damgård algorithms (MD5, SHA1, SHA2), this module acts as a
*definitional* specification. This *is* the spec of the MD algorithms, and
low-level implementations (at least, historically) were shown to refine this
specification.
- For non-MD algorithms (Blake2, SHA3), this module serves a different purpose:
it shows that Blake2 and SHA3 obey the hash laws (more on that below), and
that therefore they can be suitably interpreted as behaving like hash
algorithms in this agile specification. The agile hash therefore obeys the
hash laws, because every algorithm does.
This agile specification, in addition to establishing the high-level property that
"all hash algorithms behave like hashes" (i.e., obey the hash laws), serves as
a specification of the agile, multiplexing hash known as EverCrypt.Hash.
The hash laws are as follows.
- Any hash algorithm can be decomposed into an *incremental* specification,
relying on: init, update_multi, update_last, finish. (The MD hashes
specifically decompose update_last as padding + update but this is not
generally true of all hashes.) See
Spec.Hash.Incremental.Definitions.hash_incremental, along with the various
proofs in Spec.Hash.Incremental.X that algorithm X is equivalent to its
incremental specification.
- The update_multi function, which processes n full blocks into the internal
hash state (also known as the accumulator, borrowing from functional
programming terminology for folds), takes the empty input as its neutral element.
Concretely:
update_multi acc empty == acc
- The update_multi function is associative. Concretely:
update_multi (update_multi acc blocks) blocks' == update_multi acc (blocks @ blocks')
Proving the three hash laws is important: they are needed by the streaming
functor (which turns a block-by-block implementation into a buffered
implementation that can take arbitrary amounts of data) for functional
correctness.
(In the case of MD hashes, the proof of incrementality specifically relies on
the two properties of update_multi, but this is not true in the general case.)
The incremental specification (in lemmas/Spec.Hash.Incremental.Definitions)
introduces a notion of "update_last" and then defines the hash as update_multi,
update_last, finish.
*)
val init (a:hash_alg): init_t a | {
"checked_file": "/",
"dependencies": [
"Spec.Hash.Definitions.fst.checked",
"prims.fst.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.Agile.Hash.fsti"
} | [
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Hash.MD",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Hash.Definitions",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Hash.Definitions",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Spec.Agile",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Agile",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | a: Spec.Hash.Definitions.hash_alg -> Spec.Hash.Definitions.extra_state a | Prims.Tot | [
"total"
] | [] | [
"Spec.Hash.Definitions.hash_alg",
"Spec.Hash.Definitions.extra_state"
] | [] | false | false | false | false | false | let init_extra_state (a: hash_alg) : extra_state a =
| match a with
| Blake2B | Blake2S -> 0
| _ -> () | false |
Spec.K256.PointOps.fst | Spec.K256.PointOps.one | val one:felem | val one:felem | let one : felem = 1 | {
"file_name": "specs/Spec.K256.PointOps.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 20,
"end_line": 22,
"start_col": 0,
"start_line": 22
} | module Spec.K256.PointOps
open FStar.Mul
open Lib.IntTypes
open Lib.Sequence
module M = Lib.NatMod
module BSeq = Lib.ByteSequence
#set-options "--z3rlimit 30 --fuel 0 --ifuel 0"
/// Finite field
let prime : (p:pos{p = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F}) =
assert_norm (24 < pow2 256 - 0x1000003D1);
assert_norm (pow2 256 - 0x1000003D1 = pow2 256 - pow2 32 - pow2 9 - pow2 8 - pow2 7 - pow2 6 - pow2 4 - 1);
assert_norm (pow2 256 - 0x1000003D1 = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F);
pow2 256 - 0x1000003D1
let felem = x:nat{x < prime} | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.RawIntTypes.fsti.checked",
"Lib.NatMod.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.K256.PointOps.fst"
} | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": true,
"full_module": "Lib.NatMod",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Lib.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | Spec.K256.PointOps.felem | Prims.Tot | [
"total"
] | [] | [] | [] | false | false | false | true | false | let one:felem =
| 1 | false |
Spec.K256.PointOps.fst | Spec.K256.PointOps.fsub | val fsub (x y: felem) : felem | val fsub (x y: felem) : felem | let fsub (x y:felem) : felem = (x - y) % prime | {
"file_name": "specs/Spec.K256.PointOps.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 46,
"end_line": 25,
"start_col": 0,
"start_line": 25
} | module Spec.K256.PointOps
open FStar.Mul
open Lib.IntTypes
open Lib.Sequence
module M = Lib.NatMod
module BSeq = Lib.ByteSequence
#set-options "--z3rlimit 30 --fuel 0 --ifuel 0"
/// Finite field
let prime : (p:pos{p = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F}) =
assert_norm (24 < pow2 256 - 0x1000003D1);
assert_norm (pow2 256 - 0x1000003D1 = pow2 256 - pow2 32 - pow2 9 - pow2 8 - pow2 7 - pow2 6 - pow2 4 - 1);
assert_norm (pow2 256 - 0x1000003D1 = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F);
pow2 256 - 0x1000003D1
let felem = x:nat{x < prime}
let zero : felem = 0
let one : felem = 1 | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.RawIntTypes.fsti.checked",
"Lib.NatMod.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.K256.PointOps.fst"
} | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": true,
"full_module": "Lib.NatMod",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Lib.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x: Spec.K256.PointOps.felem -> y: Spec.K256.PointOps.felem -> Spec.K256.PointOps.felem | Prims.Tot | [
"total"
] | [] | [
"Spec.K256.PointOps.felem",
"Prims.op_Modulus",
"Prims.op_Subtraction",
"Spec.K256.PointOps.prime"
] | [] | false | false | false | true | false | let fsub (x y: felem) : felem =
| (x - y) % prime | false |
Spec.K256.PointOps.fst | Spec.K256.PointOps.b | val b:felem | val b:felem | let b : felem = 7 | {
"file_name": "specs/Spec.K256.PointOps.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 17,
"end_line": 59,
"start_col": 0,
"start_line": 59
} | module Spec.K256.PointOps
open FStar.Mul
open Lib.IntTypes
open Lib.Sequence
module M = Lib.NatMod
module BSeq = Lib.ByteSequence
#set-options "--z3rlimit 30 --fuel 0 --ifuel 0"
/// Finite field
let prime : (p:pos{p = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F}) =
assert_norm (24 < pow2 256 - 0x1000003D1);
assert_norm (pow2 256 - 0x1000003D1 = pow2 256 - pow2 32 - pow2 9 - pow2 8 - pow2 7 - pow2 6 - pow2 4 - 1);
assert_norm (pow2 256 - 0x1000003D1 = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F);
pow2 256 - 0x1000003D1
let felem = x:nat{x < prime}
let zero : felem = 0
let one : felem = 1
let fadd (x y:felem) : felem = (x + y) % prime
let fsub (x y:felem) : felem = (x - y) % prime
let fmul (x y:felem) : felem = (x * y) % prime
let finv (x:felem) : felem = M.pow_mod #prime x (prime - 2)
let fsqrt (x:felem) : felem = M.pow_mod #prime x ((prime + 1) / 4)
let is_fodd (x:nat) : bool = x % 2 = 1
let ( +% ) = fadd
let ( -% ) = fsub
let ( *% ) = fmul
let ( /% ) (x y:felem) = x *% finv y
/// Scalar field
// Group order
let q : q:pos{q < pow2 256} =
assert_norm (0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141 < pow2 256);
0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141
let qelem = x:nat{x < q}
let qadd (x y:qelem) : qelem = (x + y) % q
let qmul (x y:qelem) : qelem = (x * y) % q
let qinv (x:qelem) : qelem = M.pow_mod #q x (q - 2)
let qnegate (x:qelem) : qelem = (- x) % q
let scalar_is_high (x:qelem) : bool = x > q / 2
let ( +^ ) = qadd
let ( *^ ) = qmul
/// Elliptic curve
let aff_point = felem & felem // Affine point
let proj_point = felem & felem & felem // Projective coordinates | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.RawIntTypes.fsti.checked",
"Lib.NatMod.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.K256.PointOps.fst"
} | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": true,
"full_module": "Lib.NatMod",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Lib.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | Spec.K256.PointOps.felem | Prims.Tot | [
"total"
] | [] | [] | [] | false | false | false | true | false | let b:felem =
| 7 | false |
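b = 7 is the constant term of the secp256k1 curve equation; is_on_curve, defined a few lines further down in this file, checks exactly this relation. In short Weierstrass form the curve is

$$ y^2 \equiv x^3 + 7 \pmod{p}, \qquad p = 2^{256} - 2^{32} - 977. $$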
Spec.K256.PointOps.fst | Spec.K256.PointOps.fsqrt | val fsqrt (x: felem) : felem | val fsqrt (x: felem) : felem | let fsqrt (x:felem) : felem = M.pow_mod #prime x ((prime + 1) / 4) | {
"file_name": "specs/Spec.K256.PointOps.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 66,
"end_line": 28,
"start_col": 0,
"start_line": 28
} | module Spec.K256.PointOps
open FStar.Mul
open Lib.IntTypes
open Lib.Sequence
module M = Lib.NatMod
module BSeq = Lib.ByteSequence
#set-options "--z3rlimit 30 --fuel 0 --ifuel 0"
/// Finite field
let prime : (p:pos{p = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F}) =
assert_norm (24 < pow2 256 - 0x1000003D1);
assert_norm (pow2 256 - 0x1000003D1 = pow2 256 - pow2 32 - pow2 9 - pow2 8 - pow2 7 - pow2 6 - pow2 4 - 1);
assert_norm (pow2 256 - 0x1000003D1 = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F);
pow2 256 - 0x1000003D1
let felem = x:nat{x < prime}
let zero : felem = 0
let one : felem = 1
let fadd (x y:felem) : felem = (x + y) % prime
let fsub (x y:felem) : felem = (x - y) % prime
let fmul (x y:felem) : felem = (x * y) % prime | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.RawIntTypes.fsti.checked",
"Lib.NatMod.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.K256.PointOps.fst"
} | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": true,
"full_module": "Lib.NatMod",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Lib.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x: Spec.K256.PointOps.felem -> Spec.K256.PointOps.felem | Prims.Tot | [
"total"
] | [] | [
"Spec.K256.PointOps.felem",
"Lib.NatMod.pow_mod",
"Spec.K256.PointOps.prime",
"Prims.op_Division",
"Prims.op_Addition"
] | [] | false | false | false | true | false | let fsqrt (x: felem) : felem =
| M.pow_mod #prime x ((prime + 1) / 4) | false |
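fsqrt works because the secp256k1 prime satisfies p ≡ 3 (mod 4), so a square root of a quadratic residue can be taken with a single exponentiation. A standard sketch of the argument, assuming x = y^2 for some y:

$$ x^{(p+1)/4} = y^{(p+1)/2} = y \cdot y^{(p-1)/2} \equiv \pm y \pmod{p} $$

by Euler's criterion; callers check that the result squares back to x to detect non-residues.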
Spec.K256.PointOps.fst | Spec.K256.PointOps.fmul | val fmul (x y: felem) : felem | val fmul (x y: felem) : felem | let fmul (x y:felem) : felem = (x * y) % prime | {
"file_name": "specs/Spec.K256.PointOps.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 46,
"end_line": 26,
"start_col": 0,
"start_line": 26
} | module Spec.K256.PointOps
open FStar.Mul
open Lib.IntTypes
open Lib.Sequence
module M = Lib.NatMod
module BSeq = Lib.ByteSequence
#set-options "--z3rlimit 30 --fuel 0 --ifuel 0"
/// Finite field
let prime : (p:pos{p = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F}) =
assert_norm (24 < pow2 256 - 0x1000003D1);
assert_norm (pow2 256 - 0x1000003D1 = pow2 256 - pow2 32 - pow2 9 - pow2 8 - pow2 7 - pow2 6 - pow2 4 - 1);
assert_norm (pow2 256 - 0x1000003D1 = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F);
pow2 256 - 0x1000003D1
let felem = x:nat{x < prime}
let zero : felem = 0
let one : felem = 1
let fadd (x y:felem) : felem = (x + y) % prime | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.RawIntTypes.fsti.checked",
"Lib.NatMod.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.K256.PointOps.fst"
} | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": true,
"full_module": "Lib.NatMod",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Lib.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x: Spec.K256.PointOps.felem -> y: Spec.K256.PointOps.felem -> Spec.K256.PointOps.felem | Prims.Tot | [
"total"
] | [] | [
"Spec.K256.PointOps.felem",
"Prims.op_Modulus",
"FStar.Mul.op_Star",
"Spec.K256.PointOps.prime"
] | [] | false | false | false | true | false | let fmul (x y: felem) : felem =
| (x * y) % prime | false |
Spec.K256.PointOps.fst | Spec.K256.PointOps.qadd | val qadd (x y: qelem) : qelem | val qadd (x y: qelem) : qelem | let qadd (x y:qelem) : qelem = (x + y) % q | {
"file_name": "specs/Spec.K256.PointOps.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 42,
"end_line": 44,
"start_col": 0,
"start_line": 44
} | module Spec.K256.PointOps
open FStar.Mul
open Lib.IntTypes
open Lib.Sequence
module M = Lib.NatMod
module BSeq = Lib.ByteSequence
#set-options "--z3rlimit 30 --fuel 0 --ifuel 0"
/// Finite field
let prime : (p:pos{p = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F}) =
assert_norm (24 < pow2 256 - 0x1000003D1);
assert_norm (pow2 256 - 0x1000003D1 = pow2 256 - pow2 32 - pow2 9 - pow2 8 - pow2 7 - pow2 6 - pow2 4 - 1);
assert_norm (pow2 256 - 0x1000003D1 = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F);
pow2 256 - 0x1000003D1
let felem = x:nat{x < prime}
let zero : felem = 0
let one : felem = 1
let fadd (x y:felem) : felem = (x + y) % prime
let fsub (x y:felem) : felem = (x - y) % prime
let fmul (x y:felem) : felem = (x * y) % prime
let finv (x:felem) : felem = M.pow_mod #prime x (prime - 2)
let fsqrt (x:felem) : felem = M.pow_mod #prime x ((prime + 1) / 4)
let is_fodd (x:nat) : bool = x % 2 = 1
let ( +% ) = fadd
let ( -% ) = fsub
let ( *% ) = fmul
let ( /% ) (x y:felem) = x *% finv y
/// Scalar field
// Group order
let q : q:pos{q < pow2 256} =
assert_norm (0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141 < pow2 256);
0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141 | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.RawIntTypes.fsti.checked",
"Lib.NatMod.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.K256.PointOps.fst"
} | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": true,
"full_module": "Lib.NatMod",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Lib.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x: Spec.K256.PointOps.qelem -> y: Spec.K256.PointOps.qelem -> Spec.K256.PointOps.qelem | Prims.Tot | [
"total"
] | [] | [
"Spec.K256.PointOps.qelem",
"Prims.op_Modulus",
"Prims.op_Addition",
"Spec.K256.PointOps.q"
] | [] | false | false | false | true | false | let qadd (x y: qelem) : qelem =
| (x + y) % q | false |
Spec.K256.PointOps.fst | Spec.K256.PointOps.qinv | val qinv (x: qelem) : qelem | val qinv (x: qelem) : qelem | let qinv (x:qelem) : qelem = M.pow_mod #q x (q - 2) | {
"file_name": "specs/Spec.K256.PointOps.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 51,
"end_line": 46,
"start_col": 0,
"start_line": 46
} | module Spec.K256.PointOps
open FStar.Mul
open Lib.IntTypes
open Lib.Sequence
module M = Lib.NatMod
module BSeq = Lib.ByteSequence
#set-options "--z3rlimit 30 --fuel 0 --ifuel 0"
/// Finite field
let prime : (p:pos{p = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F}) =
assert_norm (24 < pow2 256 - 0x1000003D1);
assert_norm (pow2 256 - 0x1000003D1 = pow2 256 - pow2 32 - pow2 9 - pow2 8 - pow2 7 - pow2 6 - pow2 4 - 1);
assert_norm (pow2 256 - 0x1000003D1 = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F);
pow2 256 - 0x1000003D1
let felem = x:nat{x < prime}
let zero : felem = 0
let one : felem = 1
let fadd (x y:felem) : felem = (x + y) % prime
let fsub (x y:felem) : felem = (x - y) % prime
let fmul (x y:felem) : felem = (x * y) % prime
let finv (x:felem) : felem = M.pow_mod #prime x (prime - 2)
let fsqrt (x:felem) : felem = M.pow_mod #prime x ((prime + 1) / 4)
let is_fodd (x:nat) : bool = x % 2 = 1
let ( +% ) = fadd
let ( -% ) = fsub
let ( *% ) = fmul
let ( /% ) (x y:felem) = x *% finv y
/// Scalar field
// Group order
let q : q:pos{q < pow2 256} =
assert_norm (0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141 < pow2 256);
0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141
let qelem = x:nat{x < q}
let qadd (x y:qelem) : qelem = (x + y) % q | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.RawIntTypes.fsti.checked",
"Lib.NatMod.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.K256.PointOps.fst"
} | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": true,
"full_module": "Lib.NatMod",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Lib.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x: Spec.K256.PointOps.qelem -> Spec.K256.PointOps.qelem | Prims.Tot | [
"total"
] | [] | [
"Spec.K256.PointOps.qelem",
"Lib.NatMod.pow_mod",
"Spec.K256.PointOps.q",
"Prims.op_Subtraction"
] | [] | false | false | false | true | false | let qinv (x: qelem) : qelem =
| M.pow_mod #q x (q - 2) | false |
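qinv (like finv above) is Fermat inversion: since q is prime, Fermat's little theorem gives, for any nonzero x,

$$ x^{q-1} \equiv 1 \pmod{q} \quad\Longrightarrow\quad x^{-1} \equiv x^{q-2} \pmod{q}; $$

for x = 0 the same exponentiation simply yields 0, a convention rather than a true inverse.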
Spec.K256.PointOps.fst | Spec.K256.PointOps.qmul | val qmul (x y: qelem) : qelem | val qmul (x y: qelem) : qelem | let qmul (x y:qelem) : qelem = (x * y) % q | {
"file_name": "specs/Spec.K256.PointOps.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 42,
"end_line": 45,
"start_col": 0,
"start_line": 45
} | module Spec.K256.PointOps
open FStar.Mul
open Lib.IntTypes
open Lib.Sequence
module M = Lib.NatMod
module BSeq = Lib.ByteSequence
#set-options "--z3rlimit 30 --fuel 0 --ifuel 0"
/// Finite field
let prime : (p:pos{p = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F}) =
assert_norm (24 < pow2 256 - 0x1000003D1);
assert_norm (pow2 256 - 0x1000003D1 = pow2 256 - pow2 32 - pow2 9 - pow2 8 - pow2 7 - pow2 6 - pow2 4 - 1);
assert_norm (pow2 256 - 0x1000003D1 = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F);
pow2 256 - 0x1000003D1
let felem = x:nat{x < prime}
let zero : felem = 0
let one : felem = 1
let fadd (x y:felem) : felem = (x + y) % prime
let fsub (x y:felem) : felem = (x - y) % prime
let fmul (x y:felem) : felem = (x * y) % prime
let finv (x:felem) : felem = M.pow_mod #prime x (prime - 2)
let fsqrt (x:felem) : felem = M.pow_mod #prime x ((prime + 1) / 4)
let is_fodd (x:nat) : bool = x % 2 = 1
let ( +% ) = fadd
let ( -% ) = fsub
let ( *% ) = fmul
let ( /% ) (x y:felem) = x *% finv y
/// Scalar field
// Group order
let q : q:pos{q < pow2 256} =
assert_norm (0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141 < pow2 256);
0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141
let qelem = x:nat{x < q} | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.RawIntTypes.fsti.checked",
"Lib.NatMod.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.K256.PointOps.fst"
} | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": true,
"full_module": "Lib.NatMod",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Lib.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x: Spec.K256.PointOps.qelem -> y: Spec.K256.PointOps.qelem -> Spec.K256.PointOps.qelem | Prims.Tot | [
"total"
] | [] | [
"Spec.K256.PointOps.qelem",
"Prims.op_Modulus",
"FStar.Mul.op_Star",
"Spec.K256.PointOps.q"
] | [] | false | false | false | true | false | let qmul (x y: qelem) : qelem =
| (x * y) % q | false |
Spec.K256.PointOps.fst | Spec.K256.PointOps.g_x | val g_x:felem | val g_x:felem | let g_x : felem = 0x79be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798 | {
"file_name": "specs/Spec.K256.PointOps.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 84,
"end_line": 85,
"start_col": 0,
"start_line": 85
} | module Spec.K256.PointOps
open FStar.Mul
open Lib.IntTypes
open Lib.Sequence
module M = Lib.NatMod
module BSeq = Lib.ByteSequence
#set-options "--z3rlimit 30 --fuel 0 --ifuel 0"
/// Finite field
let prime : (p:pos{p = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F}) =
assert_norm (24 < pow2 256 - 0x1000003D1);
assert_norm (pow2 256 - 0x1000003D1 = pow2 256 - pow2 32 - pow2 9 - pow2 8 - pow2 7 - pow2 6 - pow2 4 - 1);
assert_norm (pow2 256 - 0x1000003D1 = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F);
pow2 256 - 0x1000003D1
let felem = x:nat{x < prime}
let zero : felem = 0
let one : felem = 1
let fadd (x y:felem) : felem = (x + y) % prime
let fsub (x y:felem) : felem = (x - y) % prime
let fmul (x y:felem) : felem = (x * y) % prime
let finv (x:felem) : felem = M.pow_mod #prime x (prime - 2)
let fsqrt (x:felem) : felem = M.pow_mod #prime x ((prime + 1) / 4)
let is_fodd (x:nat) : bool = x % 2 = 1
let ( +% ) = fadd
let ( -% ) = fsub
let ( *% ) = fmul
let ( /% ) (x y:felem) = x *% finv y
/// Scalar field
// Group order
let q : q:pos{q < pow2 256} =
assert_norm (0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141 < pow2 256);
0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141
let qelem = x:nat{x < q}
let qadd (x y:qelem) : qelem = (x + y) % q
let qmul (x y:qelem) : qelem = (x * y) % q
let qinv (x:qelem) : qelem = M.pow_mod #q x (q - 2)
let qnegate (x:qelem) : qelem = (- x) % q
let scalar_is_high (x:qelem) : bool = x > q / 2
let ( +^ ) = qadd
let ( *^ ) = qmul
/// Elliptic curve
let aff_point = felem & felem // Affine point
let proj_point = felem & felem & felem // Projective coordinates
// y * y = x * x * x + b
let b : felem = 7
let is_on_curve (p:aff_point) =
let x, y = p in y *% y = x *% x *% x +% b
let aff_point_at_inf : aff_point = (zero, zero) // not on the curve!
let point_at_inf : proj_point = (zero, one, zero)
let is_aff_point_at_inf (p:aff_point) : bool =
let (x, y) = p in x = zero && y = zero
let is_proj_point_at_inf (p:proj_point) : bool =
let (_, _, z) = p in z = zero
let to_aff_point (p:proj_point) : aff_point =
// if is_proj_point_at_inf p then aff_point_at_inf
let (px, py, pz) = p in
let zinv = finv pz in
let x = px *% zinv in
let y = py *% zinv in
(x, y)
let to_proj_point (p:aff_point) : proj_point =
let (x, y) = p in (x, y, one) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.RawIntTypes.fsti.checked",
"Lib.NatMod.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.K256.PointOps.fst"
} | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": true,
"full_module": "Lib.NatMod",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Lib.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | Spec.K256.PointOps.felem | Prims.Tot | [
"total"
] | [] | [] | [] | false | false | false | true | false | let g_x:felem =
| 0x79be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798 | false |
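g_x is the x-coordinate of the standard secp256k1 base point G; g_y, defined on the next line of the source file, is its y-coordinate. The generator lies on the curve and has prime order q:

$$ g_y^2 \equiv g_x^3 + 7 \pmod{p}, \qquad [q]\,G = \mathcal{O}. $$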
Spec.K256.PointOps.fst | Spec.K256.PointOps.qnegate | val qnegate (x: qelem) : qelem | val qnegate (x: qelem) : qelem | let qnegate (x:qelem) : qelem = (- x) % q | {
"file_name": "specs/Spec.K256.PointOps.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 41,
"end_line": 47,
"start_col": 0,
"start_line": 47
} | module Spec.K256.PointOps
open FStar.Mul
open Lib.IntTypes
open Lib.Sequence
module M = Lib.NatMod
module BSeq = Lib.ByteSequence
#set-options "--z3rlimit 30 --fuel 0 --ifuel 0"
/// Finite field
let prime : (p:pos{p = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F}) =
assert_norm (24 < pow2 256 - 0x1000003D1);
assert_norm (pow2 256 - 0x1000003D1 = pow2 256 - pow2 32 - pow2 9 - pow2 8 - pow2 7 - pow2 6 - pow2 4 - 1);
assert_norm (pow2 256 - 0x1000003D1 = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F);
pow2 256 - 0x1000003D1
let felem = x:nat{x < prime}
let zero : felem = 0
let one : felem = 1
let fadd (x y:felem) : felem = (x + y) % prime
let fsub (x y:felem) : felem = (x - y) % prime
let fmul (x y:felem) : felem = (x * y) % prime
let finv (x:felem) : felem = M.pow_mod #prime x (prime - 2)
let fsqrt (x:felem) : felem = M.pow_mod #prime x ((prime + 1) / 4)
let is_fodd (x:nat) : bool = x % 2 = 1
let ( +% ) = fadd
let ( -% ) = fsub
let ( *% ) = fmul
let ( /% ) (x y:felem) = x *% finv y
/// Scalar field
// Group order
let q : q:pos{q < pow2 256} =
assert_norm (0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141 < pow2 256);
0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141
let qelem = x:nat{x < q}
let qadd (x y:qelem) : qelem = (x + y) % q
let qmul (x y:qelem) : qelem = (x * y) % q | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.RawIntTypes.fsti.checked",
"Lib.NatMod.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.K256.PointOps.fst"
} | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": true,
"full_module": "Lib.NatMod",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Lib.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x: Spec.K256.PointOps.qelem -> Spec.K256.PointOps.qelem | Prims.Tot | [
"total"
] | [] | [
"Spec.K256.PointOps.qelem",
"Prims.op_Modulus",
"Prims.op_Minus",
"Spec.K256.PointOps.q"
] | [] | false | false | false | true | false | let qnegate (x: qelem) : qelem =
| (- x) % q | false |
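qnegate is the additive inverse in the scalar field, and scalar_is_high flags scalars above q/2 (the check that low-S signature normalization is usually based on):

$$ \mathrm{qnegate}(x) = (-x) \bmod q = \begin{cases} 0 & x = 0,\\ q - x & 0 < x < q, \end{cases} \qquad x + \mathrm{qnegate}(x) \equiv 0 \pmod{q}. $$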
Spec.K256.PointOps.fst | Spec.K256.PointOps.prime | val prime:(p: pos{p = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F}) | val prime:(p: pos{p = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F}) | let prime : (p:pos{p = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F}) =
assert_norm (24 < pow2 256 - 0x1000003D1);
assert_norm (pow2 256 - 0x1000003D1 = pow2 256 - pow2 32 - pow2 9 - pow2 8 - pow2 7 - pow2 6 - pow2 4 - 1);
assert_norm (pow2 256 - 0x1000003D1 = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F);
pow2 256 - 0x1000003D1 | {
"file_name": "specs/Spec.K256.PointOps.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 24,
"end_line": 18,
"start_col": 0,
"start_line": 14
} | module Spec.K256.PointOps
open FStar.Mul
open Lib.IntTypes
open Lib.Sequence
module M = Lib.NatMod
module BSeq = Lib.ByteSequence
#set-options "--z3rlimit 30 --fuel 0 --ifuel 0"
/// Finite field | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.RawIntTypes.fsti.checked",
"Lib.NatMod.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.K256.PointOps.fst"
} | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": true,
"full_module": "Lib.NatMod",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Lib.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | p: Prims.pos{p = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F} | Prims.Tot | [
"total"
] | [] | [
"Prims.op_Subtraction",
"Prims.pow2",
"Prims.unit",
"FStar.Pervasives.assert_norm",
"Prims.b2t",
"Prims.op_Equality",
"Prims.int",
"Prims.op_LessThan"
] | [] | false | false | false | false | false | let prime:(p: pos{p = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F}) =
| assert_norm (24 < pow2 256 - 0x1000003D1);
assert_norm (pow2 256 - 0x1000003D1 =
pow2 256 - pow2 32 - pow2 9 - pow2 8 - pow2 7 - pow2 6 - pow2 4 - 1);
assert_norm (pow2 256 - 0x1000003D1 =
0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F);
pow2 256 - 0x1000003D1 | false |
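The assert_norm chain pins down the sparse form of the secp256k1 prime; the constant 0x1000003D1 is 2^32 + 977, so

$$ p = 2^{256} - 2^{32} - 977 = 2^{256} - 2^{32} - 2^{9} - 2^{8} - 2^{7} - 2^{6} - 2^{4} - 1, $$

which is what enables the usual fast reduction for this prime (reduce modulo 2^256 and fold the overflow back in multiplied by 0x1000003D1).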
Spec.K256.PointOps.fst | Spec.K256.PointOps.is_aff_point_at_inf | val is_aff_point_at_inf (p: aff_point) : bool | val is_aff_point_at_inf (p: aff_point) : bool | let is_aff_point_at_inf (p:aff_point) : bool =
let (x, y) = p in x = zero && y = zero | {
"file_name": "specs/Spec.K256.PointOps.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 40,
"end_line": 68,
"start_col": 0,
"start_line": 67
} | module Spec.K256.PointOps
open FStar.Mul
open Lib.IntTypes
open Lib.Sequence
module M = Lib.NatMod
module BSeq = Lib.ByteSequence
#set-options "--z3rlimit 30 --fuel 0 --ifuel 0"
/// Finite field
let prime : (p:pos{p = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F}) =
assert_norm (24 < pow2 256 - 0x1000003D1);
assert_norm (pow2 256 - 0x1000003D1 = pow2 256 - pow2 32 - pow2 9 - pow2 8 - pow2 7 - pow2 6 - pow2 4 - 1);
assert_norm (pow2 256 - 0x1000003D1 = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F);
pow2 256 - 0x1000003D1
let felem = x:nat{x < prime}
let zero : felem = 0
let one : felem = 1
let fadd (x y:felem) : felem = (x + y) % prime
let fsub (x y:felem) : felem = (x - y) % prime
let fmul (x y:felem) : felem = (x * y) % prime
let finv (x:felem) : felem = M.pow_mod #prime x (prime - 2)
let fsqrt (x:felem) : felem = M.pow_mod #prime x ((prime + 1) / 4)
let is_fodd (x:nat) : bool = x % 2 = 1
let ( +% ) = fadd
let ( -% ) = fsub
let ( *% ) = fmul
let ( /% ) (x y:felem) = x *% finv y
/// Scalar field
// Group order
let q : q:pos{q < pow2 256} =
assert_norm (0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141 < pow2 256);
0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141
let qelem = x:nat{x < q}
let qadd (x y:qelem) : qelem = (x + y) % q
let qmul (x y:qelem) : qelem = (x * y) % q
let qinv (x:qelem) : qelem = M.pow_mod #q x (q - 2)
let qnegate (x:qelem) : qelem = (- x) % q
let scalar_is_high (x:qelem) : bool = x > q / 2
let ( +^ ) = qadd
let ( *^ ) = qmul
/// Elliptic curve
let aff_point = felem & felem // Affine point
let proj_point = felem & felem & felem // Projective coordinates
// y * y = x * x * x + b
let b : felem = 7
let is_on_curve (p:aff_point) =
let x, y = p in y *% y = x *% x *% x +% b
let aff_point_at_inf : aff_point = (zero, zero) // not on the curve!
let point_at_inf : proj_point = (zero, one, zero) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.RawIntTypes.fsti.checked",
"Lib.NatMod.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.K256.PointOps.fst"
} | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": true,
"full_module": "Lib.NatMod",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Lib.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | p: Spec.K256.PointOps.aff_point -> Prims.bool | Prims.Tot | [
"total"
] | [] | [
"Spec.K256.PointOps.aff_point",
"Spec.K256.PointOps.felem",
"Prims.op_AmpAmp",
"Prims.op_Equality",
"Spec.K256.PointOps.zero",
"Prims.bool"
] | [] | false | false | false | true | false | let is_aff_point_at_inf (p: aff_point) : bool =
| let x, y = p in
x = zero && y = zero | false |
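The affine point at infinity is encoded as (0, 0), which cannot collide with a genuine curve point since (0, 0) does not satisfy the curve equation:

$$ 0^2 = 0 \neq 0^3 + 7 \pmod{p}; $$

the projective encoding instead marks infinity with z = 0, as is_proj_point_at_inf shows.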
Spec.K256.PointOps.fst | Spec.K256.PointOps.aff_point_add | val aff_point_add (p q: aff_point) : aff_point | val aff_point_add (p q: aff_point) : aff_point | let aff_point_add (p:aff_point) (q:aff_point) : aff_point =
let (px, py) = p in let (qx, qy) = q in
if is_aff_point_at_inf p then q
else begin
if is_aff_point_at_inf q then p
else begin
if p = q then aff_point_double p
else begin
if qx = px then aff_point_at_inf
else begin
let lambda = (qy -% py) /% (qx -% px) in
let rx = lambda *% lambda -% px -% qx in
let ry = lambda *% (px -% rx) -% py in
(rx, ry)
end
end
end
end | {
"file_name": "specs/Spec.K256.PointOps.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 5,
"end_line": 120,
"start_col": 0,
"start_line": 103
} | module Spec.K256.PointOps
open FStar.Mul
open Lib.IntTypes
open Lib.Sequence
module M = Lib.NatMod
module BSeq = Lib.ByteSequence
#set-options "--z3rlimit 30 --fuel 0 --ifuel 0"
/// Finite field
let prime : (p:pos{p = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F}) =
assert_norm (24 < pow2 256 - 0x1000003D1);
assert_norm (pow2 256 - 0x1000003D1 = pow2 256 - pow2 32 - pow2 9 - pow2 8 - pow2 7 - pow2 6 - pow2 4 - 1);
assert_norm (pow2 256 - 0x1000003D1 = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F);
pow2 256 - 0x1000003D1
let felem = x:nat{x < prime}
let zero : felem = 0
let one : felem = 1
let fadd (x y:felem) : felem = (x + y) % prime
let fsub (x y:felem) : felem = (x - y) % prime
let fmul (x y:felem) : felem = (x * y) % prime
let finv (x:felem) : felem = M.pow_mod #prime x (prime - 2)
let fsqrt (x:felem) : felem = M.pow_mod #prime x ((prime + 1) / 4)
let is_fodd (x:nat) : bool = x % 2 = 1
let ( +% ) = fadd
let ( -% ) = fsub
let ( *% ) = fmul
let ( /% ) (x y:felem) = x *% finv y
/// Scalar field
// Group order
let q : q:pos{q < pow2 256} =
assert_norm (0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141 < pow2 256);
0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141
let qelem = x:nat{x < q}
let qadd (x y:qelem) : qelem = (x + y) % q
let qmul (x y:qelem) : qelem = (x * y) % q
let qinv (x:qelem) : qelem = M.pow_mod #q x (q - 2)
let qnegate (x:qelem) : qelem = (- x) % q
let scalar_is_high (x:qelem) : bool = x > q / 2
let ( +^ ) = qadd
let ( *^ ) = qmul
/// Elliptic curve
let aff_point = felem & felem // Affine point
let proj_point = felem & felem & felem // Projective coordinates
// y * y = x * x * x + b
let b : felem = 7
let is_on_curve (p:aff_point) =
let x, y = p in y *% y = x *% x *% x +% b
let aff_point_at_inf : aff_point = (zero, zero) // not on the curve!
let point_at_inf : proj_point = (zero, one, zero)
let is_aff_point_at_inf (p:aff_point) : bool =
let (x, y) = p in x = zero && y = zero
let is_proj_point_at_inf (p:proj_point) : bool =
let (_, _, z) = p in z = zero
let to_aff_point (p:proj_point) : aff_point =
// if is_proj_point_at_inf p then aff_point_at_inf
let (px, py, pz) = p in
let zinv = finv pz in
let x = px *% zinv in
let y = py *% zinv in
(x, y)
let to_proj_point (p:aff_point) : proj_point =
let (x, y) = p in (x, y, one)
// Base point
let g_x : felem = 0x79be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798
let g_y : felem = 0x483ada7726a3c4655da4fbfc0e1108a8fd17b448a68554199c47d08ffb10d4b8
let g : proj_point = (g_x, g_y, one)
/// Point addition in affine coordinates
let aff_point_double (p:aff_point) : aff_point =
let (px, py) = p in
if is_aff_point_at_inf p then p
else begin
if py = 0 then aff_point_at_inf
else begin
let lambda = 3 *% px *% px /% (2 *% py) in
let rx = lambda *% lambda -% px -% px in
let ry = lambda *% (px -% rx) -% py in
(rx, ry) end
end | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.RawIntTypes.fsti.checked",
"Lib.NatMod.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.K256.PointOps.fst"
} | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": true,
"full_module": "Lib.NatMod",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Lib.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | p: Spec.K256.PointOps.aff_point -> q: Spec.K256.PointOps.aff_point -> Spec.K256.PointOps.aff_point | Prims.Tot | [
"total"
] | [] | [
"Spec.K256.PointOps.aff_point",
"Spec.K256.PointOps.felem",
"Spec.K256.PointOps.is_aff_point_at_inf",
"Prims.bool",
"Prims.op_Equality",
"Spec.K256.PointOps.aff_point_double",
"Spec.K256.PointOps.aff_point_at_inf",
"FStar.Pervasives.Native.Mktuple2",
"Spec.K256.PointOps.op_Subtraction_Percent",
"Spec.K256.PointOps.op_Star_Percent",
"Spec.K256.PointOps.op_Slash_Percent"
] | [] | false | false | false | true | false | let aff_point_add (p q: aff_point) : aff_point =
| let px, py = p in
let qx, qy = q in
if is_aff_point_at_inf p
then q
else
if is_aff_point_at_inf q
then p
else
if p = q
then aff_point_double p
else
if qx = px
then aff_point_at_inf
else
let lambda = (qy -% py) /% (qx -% px) in
let rx = lambda *% lambda -% px -% qx in
let ry = lambda *% (px -% rx) -% py in
(rx, ry) | false |
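The branches of aff_point_add implement the usual chord-and-tangent group law. For distinct points with x1 ≠ x2 the slope and result are

$$ \lambda = \frac{y_2 - y_1}{x_2 - x_1}, \qquad x_3 = \lambda^2 - x_1 - x_2, \qquad y_3 = \lambda\,(x_1 - x_3) - y_1, $$

while aff_point_double (the p = q branch) uses the tangent slope λ = 3x₁²/(2y₁); the remaining branches return the point at infinity when x1 = x2 but p ≠ q (i.e. the operands are inverses on the curve) or propagate the identity.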
Hacl.Spec.PrecompBaseTable256.fst | Hacl.Spec.PrecompBaseTable256.lemma_point_mul_base_precomp4 | val lemma_point_mul_base_precomp4: #t:Type -> k:LE.comm_monoid t -> a:t -> b:nat{b < pow2 256} ->
Lemma (exp_as_exp_four_nat256_precomp k a b == LE.pow k a b) | val lemma_point_mul_base_precomp4: #t:Type -> k:LE.comm_monoid t -> a:t -> b:nat{b < pow2 256} ->
Lemma (exp_as_exp_four_nat256_precomp k a b == LE.pow k a b) | let lemma_point_mul_base_precomp4 #t k a b =
let (b0, b1, b2, b3) = decompose_nat256_as_four_u64 b in
let a_pow2_64 = LE.pow k a (pow2 64) in
let a_pow2_128 = LE.pow k a (pow2 128) in
let a_pow2_192 = LE.pow k a (pow2 192) in
let res = LE.exp_four_fw k a 64 b0 a_pow2_64 b1 a_pow2_128 b2 a_pow2_192 b3 4 in
calc (==) {
LE.exp_four_fw k a 64 b0 a_pow2_64 b1 a_pow2_128 b2 a_pow2_192 b3 4;
(==) { LE.exp_four_fw_lemma k a 64 b0 a_pow2_64 b1 a_pow2_128 b2 a_pow2_192 b3 4 }
k.LE.mul
(k.LE.mul
(k.LE.mul (LE.pow k a b0) (LE.pow k (LE.pow k a (pow2 64)) b1))
(LE.pow k a_pow2_128 b2))
(LE.pow k a_pow2_192 b3);
(==) { LE.lemma_pow_mul k a (pow2 64) b1 }
k.LE.mul
(k.LE.mul
(k.LE.mul (LE.pow k a b0) (LE.pow k a (b1 * pow2 64)))
(LE.pow k a_pow2_128 b2))
(LE.pow k a_pow2_192 b3);
(==) { LE.lemma_pow_add k a b0 (b1 * pow2 64) }
k.LE.mul
(k.LE.mul
(LE.pow k a (b0 + b1 * pow2 64))
(LE.pow k (LE.pow k a (pow2 128)) b2))
(LE.pow k a_pow2_192 b3);
(==) { LE.lemma_pow_mul k a (pow2 128) b2 }
k.LE.mul
(k.LE.mul (LE.pow k a (b0 + b1 * pow2 64)) (LE.pow k a (b2 * pow2 128)))
(LE.pow k a_pow2_192 b3);
(==) { LE.lemma_pow_add k a (b0 + b1 * pow2 64) (b2 * pow2 128) }
k.LE.mul
(LE.pow k a (b0 + b1 * pow2 64 + b2 * pow2 128))
(LE.pow k (LE.pow k a (pow2 192)) b3);
(==) { LE.lemma_pow_mul k a (pow2 192) b3 }
k.LE.mul
(LE.pow k a (b0 + b1 * pow2 64 + b2 * pow2 128))
(LE.pow k a (b3 * pow2 192));
(==) { LE.lemma_pow_add k a (b0 + b1 * pow2 64 + b2 * pow2 128) (b3 * pow2 192) }
LE.pow k a (b0 + b1 * pow2 64 + b2 * pow2 128 + b3 * pow2 192);
(==) { lemma_decompose_nat256_as_four_u64 b }
LE.pow k a b;
} | {
"file_name": "code/bignum/Hacl.Spec.PrecompBaseTable256.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 3,
"end_line": 91,
"start_col": 0,
"start_line": 48
} | module Hacl.Spec.PrecompBaseTable256
open FStar.Mul
open Lib.IntTypes
module LSeq = Lib.Sequence
module Loops = Lib.LoopCombinators
module LE = Lib.Exponentiation
module SE = Spec.Exponentiation
module BD = Hacl.Spec.Bignum.Definitions
#set-options "--z3rlimit 50 --fuel 0 --ifuel 0"
let lemma_mod_pow2_sub x a b =
calc (==) {
x / pow2 a % pow2 b * pow2 a;
(==) { Math.Lemmas.pow2_modulo_division_lemma_1 x a (a + b) }
x % pow2 (a + b) / pow2 a * pow2 a;
(==) { Math.Lemmas.euclidean_division_definition (x % pow2 (a + b)) (pow2 a) }
x % pow2 (a + b) - x % pow2 (a + b) % pow2 a;
(==) { Math.Lemmas.pow2_modulo_modulo_lemma_1 x a (a + b) }
x % pow2 (a + b) - x % pow2 a;
}
let lemma_decompose_nat256_as_four_u64 x =
let x0 = x % pow2 64 in
let x1 = x / pow2 64 % pow2 64 in
let x2 = x / pow2 128 % pow2 64 in
let x3' = x / pow2 192 % pow2 64 in
Math.Lemmas.lemma_div_lt x 256 192;
Math.Lemmas.small_mod (x / pow2 192) (pow2 64);
let x3 = x / pow2 192 in
assert (x3 == x3');
calc (==) {
x0 + x1 * pow2 64 + x2 * pow2 128 + x3 * pow2 192;
(==) { }
x0 + x1 * pow2 64 + (x / pow2 128 % pow2 64) * pow2 128 + x / pow2 192 * pow2 192;
(==) { lemma_mod_pow2_sub x 128 64 }
x0 + x1 * pow2 64 + x % pow2 192 - x % pow2 128 + x / pow2 192 * pow2 192;
(==) { Math.Lemmas.euclidean_division_definition x (pow2 192) }
x0 + x1 * pow2 64 - x % pow2 128 + x;
(==) { lemma_mod_pow2_sub x 64 64 }
x;
} | {
"checked_file": "/",
"dependencies": [
"Spec.Exponentiation.fsti.checked",
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.LoopCombinators.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.Exponentiation.fsti.checked",
"Hacl.Spec.Bignum.Definitions.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Calc.fsti.checked"
],
"interface_file": true,
"source_file": "Hacl.Spec.PrecompBaseTable256.fst"
} | [
{
"abbrev": true,
"full_module": "Lib.LoopCombinators",
"short_module": "Loops"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Bignum.Definitions",
"short_module": "BD"
},
{
"abbrev": true,
"full_module": "Spec.Exponentiation",
"short_module": "SE"
},
{
"abbrev": true,
"full_module": "Lib.Exponentiation",
"short_module": "LE"
},
{
"abbrev": true,
"full_module": "Lib.Sequence",
"short_module": "LSeq"
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | k: Lib.Exponentiation.Definition.comm_monoid t -> a: t -> b: Prims.nat{b < Prims.pow2 256}
-> FStar.Pervasives.Lemma
(ensures
Hacl.Spec.PrecompBaseTable256.exp_as_exp_four_nat256_precomp k a b ==
Lib.Exponentiation.Definition.pow k a b) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"Lib.Exponentiation.Definition.comm_monoid",
"Prims.nat",
"Prims.b2t",
"Prims.op_LessThan",
"Prims.pow2",
"Prims.int",
"FStar.Calc.calc_finish",
"Prims.eq2",
"Lib.Exponentiation.exp_four_fw",
"Lib.Exponentiation.Definition.pow",
"Prims.Cons",
"FStar.Preorder.relation",
"Prims.Nil",
"Prims.unit",
"FStar.Calc.calc_step",
"Prims.op_Addition",
"FStar.Mul.op_Star",
"Lib.Exponentiation.Definition.__proj__Mkcomm_monoid__item__mul",
"FStar.Calc.calc_init",
"FStar.Calc.calc_pack",
"Lib.Exponentiation.exp_four_fw_lemma",
"Prims.squash",
"Lib.Exponentiation.Definition.lemma_pow_mul",
"Lib.Exponentiation.Definition.lemma_pow_add",
"Hacl.Spec.PrecompBaseTable256.lemma_decompose_nat256_as_four_u64",
"FStar.Pervasives.Native.tuple4",
"Hacl.Spec.PrecompBaseTable256.decompose_nat256_as_four_u64"
] | [] | false | false | true | false | false | let lemma_point_mul_base_precomp4 #t k a b =
| let b0, b1, b2, b3 = decompose_nat256_as_four_u64 b in
let a_pow2_64 = LE.pow k a (pow2 64) in
let a_pow2_128 = LE.pow k a (pow2 128) in
let a_pow2_192 = LE.pow k a (pow2 192) in
let res = LE.exp_four_fw k a 64 b0 a_pow2_64 b1 a_pow2_128 b2 a_pow2_192 b3 4 in
calc ( == ) {
LE.exp_four_fw k a 64 b0 a_pow2_64 b1 a_pow2_128 b2 a_pow2_192 b3 4;
( == ) { LE.exp_four_fw_lemma k a 64 b0 a_pow2_64 b1 a_pow2_128 b2 a_pow2_192 b3 4 }
k.LE.mul (k.LE.mul (k.LE.mul (LE.pow k a b0) (LE.pow k (LE.pow k a (pow2 64)) b1))
(LE.pow k a_pow2_128 b2))
(LE.pow k a_pow2_192 b3);
( == ) { LE.lemma_pow_mul k a (pow2 64) b1 }
k.LE.mul (k.LE.mul (k.LE.mul (LE.pow k a b0) (LE.pow k a (b1 * pow2 64))) (LE.pow k a_pow2_128 b2)
)
(LE.pow k a_pow2_192 b3);
( == ) { LE.lemma_pow_add k a b0 (b1 * pow2 64) }
k.LE.mul (k.LE.mul (LE.pow k a (b0 + b1 * pow2 64)) (LE.pow k (LE.pow k a (pow2 128)) b2))
(LE.pow k a_pow2_192 b3);
( == ) { LE.lemma_pow_mul k a (pow2 128) b2 }
k.LE.mul (k.LE.mul (LE.pow k a (b0 + b1 * pow2 64)) (LE.pow k a (b2 * pow2 128)))
(LE.pow k a_pow2_192 b3);
( == ) { LE.lemma_pow_add k a (b0 + b1 * pow2 64) (b2 * pow2 128) }
k.LE.mul (LE.pow k a (b0 + b1 * pow2 64 + b2 * pow2 128)) (LE.pow k (LE.pow k a (pow2 192)) b3);
( == ) { LE.lemma_pow_mul k a (pow2 192) b3 }
k.LE.mul (LE.pow k a (b0 + b1 * pow2 64 + b2 * pow2 128)) (LE.pow k a (b3 * pow2 192));
( == ) { LE.lemma_pow_add k a (b0 + b1 * pow2 64 + b2 * pow2 128) (b3 * pow2 192) }
LE.pow k a (b0 + b1 * pow2 64 + b2 * pow2 128 + b3 * pow2 192);
( == ) { lemma_decompose_nat256_as_four_u64 b }
LE.pow k a b;
} | false |
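The calc proof above is the exponent decomposition written multiplicatively: with b split into four 64-bit limbs b = b0 + b1·2^64 + b2·2^128 + b3·2^192 (lemma_decompose_nat256_as_four_u64), the statement being established is

$$ a^{b} = a^{b_0}\cdot\bigl(a^{2^{64}}\bigr)^{b_1}\cdot\bigl(a^{2^{128}}\bigr)^{b_2}\cdot\bigl(a^{2^{192}}\bigr)^{b_3}, $$

which is why precomputing a^{2^64}, a^{2^128} and a^{2^192} lets exp_four_fw cover all 256 scalar bits with 64-bit exponents and a shared window size of 4.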
Spec.K256.PointOps.fst | Spec.K256.PointOps.aff_point_negate | val aff_point_negate (p: aff_point) : aff_point | val aff_point_negate (p: aff_point) : aff_point | let aff_point_negate (p:aff_point) : aff_point =
let x, y = p in x, (-y) % prime | {
"file_name": "specs/Spec.K256.PointOps.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 33,
"end_line": 123,
"start_col": 0,
"start_line": 122
} | module Spec.K256.PointOps
open FStar.Mul
open Lib.IntTypes
open Lib.Sequence
module M = Lib.NatMod
module BSeq = Lib.ByteSequence
#set-options "--z3rlimit 30 --fuel 0 --ifuel 0"
/// Finite field
let prime : (p:pos{p = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F}) =
assert_norm (24 < pow2 256 - 0x1000003D1);
assert_norm (pow2 256 - 0x1000003D1 = pow2 256 - pow2 32 - pow2 9 - pow2 8 - pow2 7 - pow2 6 - pow2 4 - 1);
assert_norm (pow2 256 - 0x1000003D1 = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F);
pow2 256 - 0x1000003D1
let felem = x:nat{x < prime}
let zero : felem = 0
let one : felem = 1
let fadd (x y:felem) : felem = (x + y) % prime
let fsub (x y:felem) : felem = (x - y) % prime
let fmul (x y:felem) : felem = (x * y) % prime
let finv (x:felem) : felem = M.pow_mod #prime x (prime - 2)
let fsqrt (x:felem) : felem = M.pow_mod #prime x ((prime + 1) / 4)
let is_fodd (x:nat) : bool = x % 2 = 1
let ( +% ) = fadd
let ( -% ) = fsub
let ( *% ) = fmul
let ( /% ) (x y:felem) = x *% finv y
/// Scalar field
// Group order
let q : q:pos{q < pow2 256} =
assert_norm (0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141 < pow2 256);
0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141
let qelem = x:nat{x < q}
let qadd (x y:qelem) : qelem = (x + y) % q
let qmul (x y:qelem) : qelem = (x * y) % q
let qinv (x:qelem) : qelem = M.pow_mod #q x (q - 2)
let qnegate (x:qelem) : qelem = (- x) % q
let scalar_is_high (x:qelem) : bool = x > q / 2
let ( +^ ) = qadd
let ( *^ ) = qmul
/// Elliptic curve
let aff_point = felem & felem // Affine point
let proj_point = felem & felem & felem // Projective coordinates
// y * y = x * x * x + b
let b : felem = 7
let is_on_curve (p:aff_point) =
let x, y = p in y *% y = x *% x *% x +% b
let aff_point_at_inf : aff_point = (zero, zero) // not on the curve!
let point_at_inf : proj_point = (zero, one, zero)
let is_aff_point_at_inf (p:aff_point) : bool =
let (x, y) = p in x = zero && y = zero
let is_proj_point_at_inf (p:proj_point) : bool =
let (_, _, z) = p in z = zero
let to_aff_point (p:proj_point) : aff_point =
// if is_proj_point_at_inf p then aff_point_at_inf
let (px, py, pz) = p in
let zinv = finv pz in
let x = px *% zinv in
let y = py *% zinv in
(x, y)
let to_proj_point (p:aff_point) : proj_point =
let (x, y) = p in (x, y, one)
// Base point
let g_x : felem = 0x79be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798
let g_y : felem = 0x483ada7726a3c4655da4fbfc0e1108a8fd17b448a68554199c47d08ffb10d4b8
let g : proj_point = (g_x, g_y, one)
/// Point addition in affine coordinates
let aff_point_double (p:aff_point) : aff_point =
let (px, py) = p in
if is_aff_point_at_inf p then p
else begin
if py = 0 then aff_point_at_inf
else begin
let lambda = 3 *% px *% px /% (2 *% py) in
let rx = lambda *% lambda -% px -% px in
let ry = lambda *% (px -% rx) -% py in
(rx, ry) end
end
let aff_point_add (p:aff_point) (q:aff_point) : aff_point =
let (px, py) = p in let (qx, qy) = q in
if is_aff_point_at_inf p then q
else begin
if is_aff_point_at_inf q then p
else begin
if p = q then aff_point_double p
else begin
if qx = px then aff_point_at_inf
else begin
let lambda = (qy -% py) /% (qx -% px) in
let rx = lambda *% lambda -% px -% qx in
let ry = lambda *% (px -% rx) -% py in
(rx, ry)
end
end
end
end | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.RawIntTypes.fsti.checked",
"Lib.NatMod.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.K256.PointOps.fst"
} | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": true,
"full_module": "Lib.NatMod",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Lib.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | p: Spec.K256.PointOps.aff_point -> Spec.K256.PointOps.aff_point | Prims.Tot | [
"total"
] | [] | [
"Spec.K256.PointOps.aff_point",
"Spec.K256.PointOps.felem",
"FStar.Pervasives.Native.Mktuple2",
"Prims.op_Modulus",
"Prims.op_Minus",
"Spec.K256.PointOps.prime"
] | [] | false | false | false | true | false | let aff_point_negate (p: aff_point) : aff_point =
| let x, y = p in
x, (- y) % prime | false |
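Affine negation reflects a point across the x-axis; together with the x1 = x2 branch of aff_point_add this realises the group inverse:

$$ -(x, y) = (x,\; -y \bmod p), \qquad (x, y) + (x, -y) = \mathcal{O}. $$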
Spec.K256.PointOps.fst | Spec.K256.PointOps.aff_point_double | val aff_point_double (p: aff_point) : aff_point | val aff_point_double (p: aff_point) : aff_point | let aff_point_double (p:aff_point) : aff_point =
let (px, py) = p in
if is_aff_point_at_inf p then p
else begin
if py = 0 then aff_point_at_inf
else begin
let lambda = 3 *% px *% px /% (2 *% py) in
let rx = lambda *% lambda -% px -% px in
let ry = lambda *% (px -% rx) -% py in
(rx, ry) end
end | {
"file_name": "specs/Spec.K256.PointOps.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 5,
"end_line": 101,
"start_col": 0,
"start_line": 91
} | module Spec.K256.PointOps
open FStar.Mul
open Lib.IntTypes
open Lib.Sequence
module M = Lib.NatMod
module BSeq = Lib.ByteSequence
#set-options "--z3rlimit 30 --fuel 0 --ifuel 0"
/// Finite field
let prime : (p:pos{p = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F}) =
assert_norm (24 < pow2 256 - 0x1000003D1);
assert_norm (pow2 256 - 0x1000003D1 = pow2 256 - pow2 32 - pow2 9 - pow2 8 - pow2 7 - pow2 6 - pow2 4 - 1);
assert_norm (pow2 256 - 0x1000003D1 = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F);
pow2 256 - 0x1000003D1
let felem = x:nat{x < prime}
let zero : felem = 0
let one : felem = 1
let fadd (x y:felem) : felem = (x + y) % prime
let fsub (x y:felem) : felem = (x - y) % prime
let fmul (x y:felem) : felem = (x * y) % prime
let finv (x:felem) : felem = M.pow_mod #prime x (prime - 2)
let fsqrt (x:felem) : felem = M.pow_mod #prime x ((prime + 1) / 4)
let is_fodd (x:nat) : bool = x % 2 = 1
let ( +% ) = fadd
let ( -% ) = fsub
let ( *% ) = fmul
let ( /% ) (x y:felem) = x *% finv y
/// Scalar field
// Group order
let q : q:pos{q < pow2 256} =
assert_norm (0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141 < pow2 256);
0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141
let qelem = x:nat{x < q}
let qadd (x y:qelem) : qelem = (x + y) % q
let qmul (x y:qelem) : qelem = (x * y) % q
let qinv (x:qelem) : qelem = M.pow_mod #q x (q - 2)
let qnegate (x:qelem) : qelem = (- x) % q
let scalar_is_high (x:qelem) : bool = x > q / 2
let ( +^ ) = qadd
let ( *^ ) = qmul
/// Elliptic curve
let aff_point = felem & felem // Affine point
let proj_point = felem & felem & felem // Projective coordinates
// y * y = x * x * x + b
let b : felem = 7
let is_on_curve (p:aff_point) =
let x, y = p in y *% y = x *% x *% x +% b
let aff_point_at_inf : aff_point = (zero, zero) // not on the curve!
let point_at_inf : proj_point = (zero, one, zero)
let is_aff_point_at_inf (p:aff_point) : bool =
let (x, y) = p in x = zero && y = zero
let is_proj_point_at_inf (p:proj_point) : bool =
let (_, _, z) = p in z = zero
let to_aff_point (p:proj_point) : aff_point =
// if is_proj_point_at_inf p then aff_point_at_inf
let (px, py, pz) = p in
let zinv = finv pz in
let x = px *% zinv in
let y = py *% zinv in
(x, y)
let to_proj_point (p:aff_point) : proj_point =
let (x, y) = p in (x, y, one)
// Base point
let g_x : felem = 0x79be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798
let g_y : felem = 0x483ada7726a3c4655da4fbfc0e1108a8fd17b448a68554199c47d08ffb10d4b8
let g : proj_point = (g_x, g_y, one)
/// Point addition in affine coordinates | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.RawIntTypes.fsti.checked",
"Lib.NatMod.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.K256.PointOps.fst"
} | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": true,
"full_module": "Lib.NatMod",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Lib.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | p: Spec.K256.PointOps.aff_point -> Spec.K256.PointOps.aff_point | Prims.Tot | [
"total"
] | [] | [
"Spec.K256.PointOps.aff_point",
"Spec.K256.PointOps.felem",
"Spec.K256.PointOps.is_aff_point_at_inf",
"Prims.bool",
"Prims.op_Equality",
"Prims.int",
"Spec.K256.PointOps.aff_point_at_inf",
"FStar.Pervasives.Native.Mktuple2",
"Spec.K256.PointOps.op_Subtraction_Percent",
"Spec.K256.PointOps.op_Star_Percent",
"Spec.K256.PointOps.op_Slash_Percent"
] | [] | false | false | false | true | false | let aff_point_double (p: aff_point) : aff_point =
| let px, py = p in
if is_aff_point_at_inf p
then p
else
if py = 0
then aff_point_at_inf
else
let lambda = 3 *% px *% px /% (2 *% py) in
let rx = lambda *% lambda -% px -% px in
let ry = lambda *% (px -% rx) -% py in
(rx, ry) | false |
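The record above completes aff_point_double, the textbook affine doubling formula: lambda = 3*x^2 / (2*y), rx = lambda^2 - 2*x, ry = lambda*(x - rx) - y, with the (0, 0) encoding of the point at infinity and the y = 0 case handled up front. A hedged Python sketch of the same computation follows; the Fermat-inverse helper and all names are assumptions of this sketch, not taken from the dataset.

# Illustrative sketch of affine doubling on y^2 = x^3 + 7 over GF(P).
P = 2**256 - 2**32 - 977

def finv(x):
    # Modular inverse via Fermat's little theorem, as in the spec's finv.
    return pow(x, P - 2, P)

def aff_point_double(point):
    x, y = point
    if (x, y) == (0, 0) or y == 0:
        # (0, 0) encodes the point at infinity; y = 0 would mean a vertical tangent.
        return (0, 0)
    lam = 3 * x * x * finv(2 * y) % P
    rx = (lam * lam - 2 * x) % P
    ry = (lam * (x - rx) - y) % P
    return (rx, ry)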
Spec.K256.PointOps.fst | Spec.K256.PointOps.g_y | val g_y:felem | val g_y:felem | let g_y : felem = 0x483ada7726a3c4655da4fbfc0e1108a8fd17b448a68554199c47d08ffb10d4b8 | {
"file_name": "specs/Spec.K256.PointOps.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 84,
"end_line": 86,
"start_col": 0,
"start_line": 86
} | module Spec.K256.PointOps
open FStar.Mul
open Lib.IntTypes
open Lib.Sequence
module M = Lib.NatMod
module BSeq = Lib.ByteSequence
#set-options "--z3rlimit 30 --fuel 0 --ifuel 0"
/// Finite field
let prime : (p:pos{p = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F}) =
assert_norm (24 < pow2 256 - 0x1000003D1);
assert_norm (pow2 256 - 0x1000003D1 = pow2 256 - pow2 32 - pow2 9 - pow2 8 - pow2 7 - pow2 6 - pow2 4 - 1);
assert_norm (pow2 256 - 0x1000003D1 = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F);
pow2 256 - 0x1000003D1
let felem = x:nat{x < prime}
let zero : felem = 0
let one : felem = 1
let fadd (x y:felem) : felem = (x + y) % prime
let fsub (x y:felem) : felem = (x - y) % prime
let fmul (x y:felem) : felem = (x * y) % prime
let finv (x:felem) : felem = M.pow_mod #prime x (prime - 2)
let fsqrt (x:felem) : felem = M.pow_mod #prime x ((prime + 1) / 4)
let is_fodd (x:nat) : bool = x % 2 = 1
let ( +% ) = fadd
let ( -% ) = fsub
let ( *% ) = fmul
let ( /% ) (x y:felem) = x *% finv y
/// Scalar field
// Group order
let q : q:pos{q < pow2 256} =
assert_norm (0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141 < pow2 256);
0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141
let qelem = x:nat{x < q}
let qadd (x y:qelem) : qelem = (x + y) % q
let qmul (x y:qelem) : qelem = (x * y) % q
let qinv (x:qelem) : qelem = M.pow_mod #q x (q - 2)
let qnegate (x:qelem) : qelem = (- x) % q
let scalar_is_high (x:qelem) : bool = x > q / 2
let ( +^ ) = qadd
let ( *^ ) = qmul
/// Elliptic curve
let aff_point = felem & felem // Affine point
let proj_point = felem & felem & felem // Projective coordinates
// y * y = x * x * x + b
let b : felem = 7
let is_on_curve (p:aff_point) =
let x, y = p in y *% y = x *% x *% x +% b
let aff_point_at_inf : aff_point = (zero, zero) // not on the curve!
let point_at_inf : proj_point = (zero, one, zero)
let is_aff_point_at_inf (p:aff_point) : bool =
let (x, y) = p in x = zero && y = zero
let is_proj_point_at_inf (p:proj_point) : bool =
let (_, _, z) = p in z = zero
let to_aff_point (p:proj_point) : aff_point =
// if is_proj_point_at_inf p then aff_point_at_inf
let (px, py, pz) = p in
let zinv = finv pz in
let x = px *% zinv in
let y = py *% zinv in
(x, y)
let to_proj_point (p:aff_point) : proj_point =
let (x, y) = p in (x, y, one)
// Base point | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.RawIntTypes.fsti.checked",
"Lib.NatMod.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.K256.PointOps.fst"
} | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": true,
"full_module": "Lib.NatMod",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Lib.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | Spec.K256.PointOps.felem | Prims.Tot | [
"total"
] | [] | [] | [] | false | false | false | true | false | let g_y:felem =
| 0x483ada7726a3c4655da4fbfc0e1108a8fd17b448a68554199c47d08ffb10d4b8 | false |
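g_x and g_y above are the affine coordinates of the standard secp256k1 base point. As a quick sanity check, illustrative only and not part of the dataset, the base point satisfies the curve equation y^2 = x^3 + 7 (mod p):

# Sanity check: the secp256k1 base point lies on y^2 = x^3 + 7 (mod P).
P = 2**256 - 2**32 - 977
GX = 0x79be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798
GY = 0x483ada7726a3c4655da4fbfc0e1108a8fd17b448a68554199c47d08ffb10d4b8
assert (GY * GY) % P == (GX * GX * GX + 7) % P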
Spec.K256.PointOps.fst | Spec.K256.PointOps.is_on_curve | val is_on_curve : p: Spec.K256.PointOps.aff_point -> Prims.bool | let is_on_curve (p:aff_point) =
let x, y = p in y *% y = x *% x *% x +% b | {
"file_name": "specs/Spec.K256.PointOps.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 43,
"end_line": 62,
"start_col": 0,
"start_line": 61
} | module Spec.K256.PointOps
open FStar.Mul
open Lib.IntTypes
open Lib.Sequence
module M = Lib.NatMod
module BSeq = Lib.ByteSequence
#set-options "--z3rlimit 30 --fuel 0 --ifuel 0"
/// Finite field
let prime : (p:pos{p = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F}) =
assert_norm (24 < pow2 256 - 0x1000003D1);
assert_norm (pow2 256 - 0x1000003D1 = pow2 256 - pow2 32 - pow2 9 - pow2 8 - pow2 7 - pow2 6 - pow2 4 - 1);
assert_norm (pow2 256 - 0x1000003D1 = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F);
pow2 256 - 0x1000003D1
let felem = x:nat{x < prime}
let zero : felem = 0
let one : felem = 1
let fadd (x y:felem) : felem = (x + y) % prime
let fsub (x y:felem) : felem = (x - y) % prime
let fmul (x y:felem) : felem = (x * y) % prime
let finv (x:felem) : felem = M.pow_mod #prime x (prime - 2)
let fsqrt (x:felem) : felem = M.pow_mod #prime x ((prime + 1) / 4)
let is_fodd (x:nat) : bool = x % 2 = 1
let ( +% ) = fadd
let ( -% ) = fsub
let ( *% ) = fmul
let ( /% ) (x y:felem) = x *% finv y
/// Scalar field
// Group order
let q : q:pos{q < pow2 256} =
assert_norm (0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141 < pow2 256);
0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141
let qelem = x:nat{x < q}
let qadd (x y:qelem) : qelem = (x + y) % q
let qmul (x y:qelem) : qelem = (x * y) % q
let qinv (x:qelem) : qelem = M.pow_mod #q x (q - 2)
let qnegate (x:qelem) : qelem = (- x) % q
let scalar_is_high (x:qelem) : bool = x > q / 2
let ( +^ ) = qadd
let ( *^ ) = qmul
/// Elliptic curve
let aff_point = felem & felem // Affine point
let proj_point = felem & felem & felem // Projective coordinates
// y * y = x * x * x + b
let b : felem = 7 | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.RawIntTypes.fsti.checked",
"Lib.NatMod.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.K256.PointOps.fst"
} | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": true,
"full_module": "Lib.NatMod",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Lib.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | p: Spec.K256.PointOps.aff_point -> Prims.bool | Prims.Tot | [
"total"
] | [] | [
"Spec.K256.PointOps.aff_point",
"Spec.K256.PointOps.felem",
"Prims.op_Equality",
"Spec.K256.PointOps.op_Star_Percent",
"Spec.K256.PointOps.op_Plus_Percent",
"Spec.K256.PointOps.b",
"Prims.bool"
] | [] | false | false | false | true | false | let is_on_curve (p: aff_point) =
| let x, y = p in
y *% y = x *% x *% x +% b | false |
|
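is_on_curve above checks the short Weierstrass equation with b = 7. The affine encoding (zero, zero) used elsewhere for the point at infinity deliberately fails this check, which is what the "not on the curve!" comment in the source records. A small illustrative Python check follows; the names are assumptions of this sketch.

# The (0, 0) encoding of the point at infinity is not a curve point: 0 != 7 (mod P).
P = 2**256 - 2**32 - 977

def is_on_curve(point):
    x, y = point
    return (y * y) % P == (x * x * x + 7) % P

assert not is_on_curve((0, 0))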
Spec.K256.PointOps.fst | Spec.K256.PointOps.point_negate | val point_negate (p: proj_point) : proj_point | val point_negate (p: proj_point) : proj_point | let point_negate (p:proj_point) : proj_point =
let x, y, z = p in
x, (-y) % prime, z | {
"file_name": "specs/Spec.K256.PointOps.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 20,
"end_line": 166,
"start_col": 0,
"start_line": 164
} | module Spec.K256.PointOps
open FStar.Mul
open Lib.IntTypes
open Lib.Sequence
module M = Lib.NatMod
module BSeq = Lib.ByteSequence
#set-options "--z3rlimit 30 --fuel 0 --ifuel 0"
/// Finite field
let prime : (p:pos{p = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F}) =
assert_norm (24 < pow2 256 - 0x1000003D1);
assert_norm (pow2 256 - 0x1000003D1 = pow2 256 - pow2 32 - pow2 9 - pow2 8 - pow2 7 - pow2 6 - pow2 4 - 1);
assert_norm (pow2 256 - 0x1000003D1 = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F);
pow2 256 - 0x1000003D1
let felem = x:nat{x < prime}
let zero : felem = 0
let one : felem = 1
let fadd (x y:felem) : felem = (x + y) % prime
let fsub (x y:felem) : felem = (x - y) % prime
let fmul (x y:felem) : felem = (x * y) % prime
let finv (x:felem) : felem = M.pow_mod #prime x (prime - 2)
let fsqrt (x:felem) : felem = M.pow_mod #prime x ((prime + 1) / 4)
let is_fodd (x:nat) : bool = x % 2 = 1
let ( +% ) = fadd
let ( -% ) = fsub
let ( *% ) = fmul
let ( /% ) (x y:felem) = x *% finv y
/// Scalar field
// Group order
let q : q:pos{q < pow2 256} =
assert_norm (0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141 < pow2 256);
0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141
let qelem = x:nat{x < q}
let qadd (x y:qelem) : qelem = (x + y) % q
let qmul (x y:qelem) : qelem = (x * y) % q
let qinv (x:qelem) : qelem = M.pow_mod #q x (q - 2)
let qnegate (x:qelem) : qelem = (- x) % q
let scalar_is_high (x:qelem) : bool = x > q / 2
let ( +^ ) = qadd
let ( *^ ) = qmul
/// Elliptic curve
let aff_point = felem & felem // Affine point
let proj_point = felem & felem & felem // Projective coordinates
// y * y = x * x * x + b
let b : felem = 7
let is_on_curve (p:aff_point) =
let x, y = p in y *% y = x *% x *% x +% b
let aff_point_at_inf : aff_point = (zero, zero) // not on the curve!
let point_at_inf : proj_point = (zero, one, zero)
let is_aff_point_at_inf (p:aff_point) : bool =
let (x, y) = p in x = zero && y = zero
let is_proj_point_at_inf (p:proj_point) : bool =
let (_, _, z) = p in z = zero
let to_aff_point (p:proj_point) : aff_point =
// if is_proj_point_at_inf p then aff_point_at_inf
let (px, py, pz) = p in
let zinv = finv pz in
let x = px *% zinv in
let y = py *% zinv in
(x, y)
let to_proj_point (p:aff_point) : proj_point =
let (x, y) = p in (x, y, one)
// Base point
let g_x : felem = 0x79be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798
let g_y : felem = 0x483ada7726a3c4655da4fbfc0e1108a8fd17b448a68554199c47d08ffb10d4b8
let g : proj_point = (g_x, g_y, one)
/// Point addition in affine coordinates
let aff_point_double (p:aff_point) : aff_point =
let (px, py) = p in
if is_aff_point_at_inf p then p
else begin
if py = 0 then aff_point_at_inf
else begin
let lambda = 3 *% px *% px /% (2 *% py) in
let rx = lambda *% lambda -% px -% px in
let ry = lambda *% (px -% rx) -% py in
(rx, ry) end
end
let aff_point_add (p:aff_point) (q:aff_point) : aff_point =
let (px, py) = p in let (qx, qy) = q in
if is_aff_point_at_inf p then q
else begin
if is_aff_point_at_inf q then p
else begin
if p = q then aff_point_double p
else begin
if qx = px then aff_point_at_inf
else begin
let lambda = (qy -% py) /% (qx -% px) in
let rx = lambda *% lambda -% px -% qx in
let ry = lambda *% (px -% rx) -% py in
(rx, ry)
end
end
end
end
let aff_point_negate (p:aff_point) : aff_point =
let x, y = p in x, (-y) % prime
/// Point addition and doubling in projective coordinates
let point_add (p:proj_point) (q:proj_point) : proj_point =
let x1, y1, z1 = p in
let x2, y2, z2 = q in
let xx = x1 *% x2 in
let yy = y1 *% y2 in
let zz = z1 *% z2 in
let xy_pairs = (x1 +% y1) *% (x2 +% y2) -% (xx +% yy) in
let yz_pairs = (y1 +% z1) *% (y2 +% z2) -% (yy +% zz) in
let xz_pairs = (x1 +% z1) *% (x2 +% z2) -% (xx +% zz) in
let bzz3 = 3 *% b *% zz in
let yy_m_bzz3 = yy -% bzz3 in
let yy_p_bzz3 = yy +% bzz3 in
let byz3 = 3 *% b *% yz_pairs in
let xx3 = 3 *% xx in
let bxx9 = 3 *% b *% xx3 in
let x3 = xy_pairs *% yy_m_bzz3 -% byz3 *% xz_pairs in
let y3 = yy_p_bzz3 *% yy_m_bzz3 +% bxx9 *% xz_pairs in
let z3 = yz_pairs *% yy_p_bzz3 +% xx3 *% xy_pairs in
x3, y3, z3
let point_double (p:proj_point) : proj_point =
let x, y, z = p in
let yy = y *% y in
let zz = z *% z in
let xy2 = 2 *% x *% y in
let bzz3 = 3 *% b *% zz in
let bzz9 = 3 *% bzz3 in
let yy_m_bzz9 = yy -% bzz9 in
let yy_p_bzz3 = yy +% bzz3 in
let yy_zz = yy *% zz in
let t = 24 *% b *% yy_zz in
let x3 = xy2 *% yy_m_bzz9 in
let y3 = yy_m_bzz9 *% yy_p_bzz3 +% t in
let z3 = yy *% y *% z *% 8 in
x3, y3, z3 | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.RawIntTypes.fsti.checked",
"Lib.NatMod.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.K256.PointOps.fst"
} | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": true,
"full_module": "Lib.NatMod",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Lib.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | p: Spec.K256.PointOps.proj_point -> Spec.K256.PointOps.proj_point | Prims.Tot | [
"total"
] | [] | [
"Spec.K256.PointOps.proj_point",
"Spec.K256.PointOps.felem",
"FStar.Pervasives.Native.Mktuple3",
"Prims.op_Modulus",
"Prims.op_Minus",
"Spec.K256.PointOps.prime"
] | [] | false | false | false | true | false | let point_negate (p: proj_point) : proj_point =
| let x, y, z = p in
x, (- y) % prime, z | false |
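point_negate above negates the Y coordinate of a projective triple (X, Y, Z); under the normalisation (X, Y, Z) -> (X/Z, Y/Z) this agrees with affine negation. An illustrative Python check on the base point with Z = 1 follows; the helper names are assumptions of this sketch.

# Negating Y in projective coordinates matches affine negation after dividing by Z.
P = 2**256 - 2**32 - 977
GX = 0x79be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798
GY = 0x483ada7726a3c4655da4fbfc0e1108a8fd17b448a68554199c47d08ffb10d4b8

def to_aff(point):
    x, y, z = point
    zinv = pow(z, P - 2, P)  # modular inverse of Z
    return (x * zinv % P, y * zinv % P)

def point_negate(point):
    x, y, z = point
    return (x, (-y) % P, z)

assert to_aff(point_negate((GX, GY, 1))) == (GX, (-GY) % P)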
LowStar.BufferOps.fst | LowStar.BufferOps.op_Array_Access | val op_Array_Access : b: LowStar.Monotonic.Buffer.mbuffer a rrel rel -> i: FStar.UInt32.t -> FStar.HyperStack.ST.Stack a | let op_Array_Access (#a:Type0) (#rrel #rel:B.srel a) = B.index #a #rrel #rel | {
"file_name": "ulib/LowStar.BufferOps.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 76,
"end_line": 33,
"start_col": 0,
"start_line": 33
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.BufferOps
(* Handy notations for LowStar.Buffer, so users can open this module
instead of the whole LowStar.Buffer, to just bring these operators
and notations into the scope without bringing any definition from
LowStar.Buffer into the scope. *)
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module U32 = FStar.UInt32
module G = FStar.Ghost
module Seq = FStar.Seq
module B = LowStar.Buffer
module L = FStar.List.Tot
inline_for_extraction | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.BufferOps.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | b: LowStar.Monotonic.Buffer.mbuffer a rrel rel -> i: FStar.UInt32.t -> FStar.HyperStack.ST.Stack a | FStar.HyperStack.ST.Stack | [] | [] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.index",
"LowStar.Monotonic.Buffer.mbuffer",
"FStar.UInt32.t",
"FStar.Monotonic.HyperStack.mem",
"Prims.l_and",
"LowStar.Monotonic.Buffer.live",
"Prims.b2t",
"Prims.op_LessThan",
"FStar.UInt32.v",
"LowStar.Monotonic.Buffer.length",
"Prims.eq2",
"FStar.Seq.Base.index",
"LowStar.Monotonic.Buffer.as_seq"
] | [] | false | true | false | false | false | let ( .() ) (#a: Type0) (#rrel #rel: B.srel a) =
| B.index #a #rrel #rel | false |
|
LowStar.BufferOps.fst | LowStar.BufferOps.op_Array_Assignment | val op_Array_Assignment : b: LowStar.Monotonic.Buffer.mbuffer a rrel rel -> i: FStar.UInt32.t -> v: a
-> FStar.HyperStack.ST.Stack Prims.unit | let op_Array_Assignment (#a:Type0) (#rrel #rel:B.srel a) = B.upd #a #rrel #rel | {
"file_name": "ulib/LowStar.BufferOps.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 78,
"end_line": 37,
"start_col": 0,
"start_line": 37
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.BufferOps
(* Handy notations for LowStar.Buffer, so users can open this module
instead of the whole LowStar.Buffer, to just bring these operators
and notations into the scope without bringing any definition from
LowStar.Buffer into the scope. *)
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module U32 = FStar.UInt32
module G = FStar.Ghost
module Seq = FStar.Seq
module B = LowStar.Buffer
module L = FStar.List.Tot
inline_for_extraction
unfold
let op_Array_Access (#a:Type0) (#rrel #rel:B.srel a) = B.index #a #rrel #rel
inline_for_extraction | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.BufferOps.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | b: LowStar.Monotonic.Buffer.mbuffer a rrel rel -> i: FStar.UInt32.t -> v: a
-> FStar.HyperStack.ST.Stack Prims.unit | FStar.HyperStack.ST.Stack | [] | [] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.upd",
"LowStar.Monotonic.Buffer.mbuffer",
"FStar.UInt32.t",
"Prims.unit",
"FStar.Monotonic.HyperStack.mem",
"Prims.l_and",
"LowStar.Monotonic.Buffer.live",
"Prims.b2t",
"Prims.op_LessThan",
"FStar.UInt32.v",
"LowStar.Monotonic.Buffer.length",
"LowStar.Monotonic.Buffer.as_seq",
"FStar.Seq.Base.upd",
"Prims.op_Negation",
"LowStar.Monotonic.Buffer.g_is_null",
"LowStar.Monotonic.Buffer.modifies",
"LowStar.Monotonic.Buffer.loc_buffer",
"Prims.eq2",
"FStar.Seq.Base.seq"
] | [] | false | true | false | false | false | let ( .()<- ) (#a: Type0) (#rrel #rel: B.srel a) =
| B.upd #a #rrel #rel | false |
|
Spec.K256.PointOps.fst | Spec.K256.PointOps.recover_y | val recover_y (x: felem) (is_odd: bool) : option felem | val recover_y (x: felem) (is_odd: bool) : option felem | let recover_y (x:felem) (is_odd:bool) : option felem =
let y2 = x *% x *% x +% b in
let y = fsqrt y2 in
if y *% y <> y2 then None
else begin
let y = if is_fodd y <> is_odd then (prime - y) % prime else y in
Some y end | {
"file_name": "specs/Spec.K256.PointOps.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 14,
"end_line": 212,
"start_col": 0,
"start_line": 206
} | module Spec.K256.PointOps
open FStar.Mul
open Lib.IntTypes
open Lib.Sequence
module M = Lib.NatMod
module BSeq = Lib.ByteSequence
#set-options "--z3rlimit 30 --fuel 0 --ifuel 0"
/// Finite field
let prime : (p:pos{p = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F}) =
assert_norm (24 < pow2 256 - 0x1000003D1);
assert_norm (pow2 256 - 0x1000003D1 = pow2 256 - pow2 32 - pow2 9 - pow2 8 - pow2 7 - pow2 6 - pow2 4 - 1);
assert_norm (pow2 256 - 0x1000003D1 = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F);
pow2 256 - 0x1000003D1
let felem = x:nat{x < prime}
let zero : felem = 0
let one : felem = 1
let fadd (x y:felem) : felem = (x + y) % prime
let fsub (x y:felem) : felem = (x - y) % prime
let fmul (x y:felem) : felem = (x * y) % prime
let finv (x:felem) : felem = M.pow_mod #prime x (prime - 2)
let fsqrt (x:felem) : felem = M.pow_mod #prime x ((prime + 1) / 4)
let is_fodd (x:nat) : bool = x % 2 = 1
let ( +% ) = fadd
let ( -% ) = fsub
let ( *% ) = fmul
let ( /% ) (x y:felem) = x *% finv y
/// Scalar field
// Group order
let q : q:pos{q < pow2 256} =
assert_norm (0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141 < pow2 256);
0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141
let qelem = x:nat{x < q}
let qadd (x y:qelem) : qelem = (x + y) % q
let qmul (x y:qelem) : qelem = (x * y) % q
let qinv (x:qelem) : qelem = M.pow_mod #q x (q - 2)
let qnegate (x:qelem) : qelem = (- x) % q
let scalar_is_high (x:qelem) : bool = x > q / 2
let ( +^ ) = qadd
let ( *^ ) = qmul
/// Elliptic curve
let aff_point = felem & felem // Affine point
let proj_point = felem & felem & felem // Projective coordinates
// y * y = x * x * x + b
let b : felem = 7
let is_on_curve (p:aff_point) =
let x, y = p in y *% y = x *% x *% x +% b
let aff_point_at_inf : aff_point = (zero, zero) // not on the curve!
let point_at_inf : proj_point = (zero, one, zero)
let is_aff_point_at_inf (p:aff_point) : bool =
let (x, y) = p in x = zero && y = zero
let is_proj_point_at_inf (p:proj_point) : bool =
let (_, _, z) = p in z = zero
let to_aff_point (p:proj_point) : aff_point =
// if is_proj_point_at_inf p then aff_point_at_inf
let (px, py, pz) = p in
let zinv = finv pz in
let x = px *% zinv in
let y = py *% zinv in
(x, y)
let to_proj_point (p:aff_point) : proj_point =
let (x, y) = p in (x, y, one)
// Base point
let g_x : felem = 0x79be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798
let g_y : felem = 0x483ada7726a3c4655da4fbfc0e1108a8fd17b448a68554199c47d08ffb10d4b8
let g : proj_point = (g_x, g_y, one)
/// Point addition in affine coordinates
let aff_point_double (p:aff_point) : aff_point =
let (px, py) = p in
if is_aff_point_at_inf p then p
else begin
if py = 0 then aff_point_at_inf
else begin
let lambda = 3 *% px *% px /% (2 *% py) in
let rx = lambda *% lambda -% px -% px in
let ry = lambda *% (px -% rx) -% py in
(rx, ry) end
end
let aff_point_add (p:aff_point) (q:aff_point) : aff_point =
let (px, py) = p in let (qx, qy) = q in
if is_aff_point_at_inf p then q
else begin
if is_aff_point_at_inf q then p
else begin
if p = q then aff_point_double p
else begin
if qx = px then aff_point_at_inf
else begin
let lambda = (qy -% py) /% (qx -% px) in
let rx = lambda *% lambda -% px -% qx in
let ry = lambda *% (px -% rx) -% py in
(rx, ry)
end
end
end
end
let aff_point_negate (p:aff_point) : aff_point =
let x, y = p in x, (-y) % prime
/// Point addition and doubling in projective coordinates
let point_add (p:proj_point) (q:proj_point) : proj_point =
let x1, y1, z1 = p in
let x2, y2, z2 = q in
let xx = x1 *% x2 in
let yy = y1 *% y2 in
let zz = z1 *% z2 in
let xy_pairs = (x1 +% y1) *% (x2 +% y2) -% (xx +% yy) in
let yz_pairs = (y1 +% z1) *% (y2 +% z2) -% (yy +% zz) in
let xz_pairs = (x1 +% z1) *% (x2 +% z2) -% (xx +% zz) in
let bzz3 = 3 *% b *% zz in
let yy_m_bzz3 = yy -% bzz3 in
let yy_p_bzz3 = yy +% bzz3 in
let byz3 = 3 *% b *% yz_pairs in
let xx3 = 3 *% xx in
let bxx9 = 3 *% b *% xx3 in
let x3 = xy_pairs *% yy_m_bzz3 -% byz3 *% xz_pairs in
let y3 = yy_p_bzz3 *% yy_m_bzz3 +% bxx9 *% xz_pairs in
let z3 = yz_pairs *% yy_p_bzz3 +% xx3 *% xy_pairs in
x3, y3, z3
let point_double (p:proj_point) : proj_point =
let x, y, z = p in
let yy = y *% y in
let zz = z *% z in
let xy2 = 2 *% x *% y in
let bzz3 = 3 *% b *% zz in
let bzz9 = 3 *% bzz3 in
let yy_m_bzz9 = yy -% bzz9 in
let yy_p_bzz3 = yy +% bzz3 in
let yy_zz = yy *% zz in
let t = 24 *% b *% yy_zz in
let x3 = xy2 *% yy_m_bzz9 in
let y3 = yy_m_bzz9 *% yy_p_bzz3 +% t in
let z3 = yy *% y *% z *% 8 in
x3, y3, z3
let point_negate (p:proj_point) : proj_point =
let x, y, z = p in
x, (-y) % prime, z
/// Point conversion between affine, projective and bytes representation
let aff_point_load (b:BSeq.lbytes 64) : option aff_point =
let pk_x = BSeq.nat_from_bytes_be (sub b 0 32) in
let pk_y = BSeq.nat_from_bytes_be (sub b 32 32) in
let is_x_valid = pk_x < prime in
let is_y_valid = pk_y < prime in
let is_xy_on_curve =
if is_x_valid && is_y_valid then is_on_curve (pk_x, pk_y) else false in
if is_xy_on_curve then Some (pk_x, pk_y) else None
let load_point (b:BSeq.lbytes 64) : option proj_point =
match (aff_point_load b) with
| Some p -> Some (to_proj_point p)
| None -> None
let point_inv_bytes (b:BSeq.lbytes 64) =
let px = BSeq.nat_from_bytes_be (sub b 0 32) in
let py = BSeq.nat_from_bytes_be (sub b 32 32) in
px < prime && py < prime && is_on_curve (px, py)
let load_point_nocheck (b:BSeq.lbytes 64{point_inv_bytes b}) : proj_point =
let px = BSeq.nat_from_bytes_be (sub b 0 32) in
let py = BSeq.nat_from_bytes_be (sub b 32 32) in
to_proj_point (px, py)
let aff_point_store (p:aff_point) : BSeq.lbytes 64 =
let (px, py) = p in
let pxb = BSeq.nat_to_bytes_be 32 px in
let pxy = BSeq.nat_to_bytes_be 32 py in
concat #uint8 #32 #32 pxb pxy
let point_store (p:proj_point) : BSeq.lbytes 64 =
aff_point_store (to_aff_point p) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.RawIntTypes.fsti.checked",
"Lib.NatMod.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.K256.PointOps.fst"
} | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": true,
"full_module": "Lib.NatMod",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Lib.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x: Spec.K256.PointOps.felem -> is_odd: Prims.bool
-> FStar.Pervasives.Native.option Spec.K256.PointOps.felem | Prims.Tot | [
"total"
] | [] | [
"Spec.K256.PointOps.felem",
"Prims.bool",
"Prims.op_disEquality",
"Spec.K256.PointOps.op_Star_Percent",
"FStar.Pervasives.Native.None",
"FStar.Pervasives.Native.Some",
"Spec.K256.PointOps.is_fodd",
"Prims.op_Modulus",
"Prims.op_Subtraction",
"Spec.K256.PointOps.prime",
"FStar.Pervasives.Native.option",
"Spec.K256.PointOps.fsqrt",
"Spec.K256.PointOps.op_Plus_Percent",
"Spec.K256.PointOps.b"
] | [] | false | false | false | true | false | let recover_y (x: felem) (is_odd: bool) : option felem =
| let y2 = x *% x *% x +% b in
let y = fsqrt y2 in
if y *% y <> y2
then None
else
let y = if is_fodd y <> is_odd then (prime - y) % prime else y in
Some y | false |
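recover_y above computes a candidate square root of x^3 + 7 by raising it to (p + 1)/4, which yields a square root exactly because the secp256k1 prime satisfies p mod 4 = 3, and it then flips the root so that its parity matches the requested one. An illustrative Python transcription follows; the names mirror the spec, but the code is only a sketch, not the dataset's definition.

# Illustrative y-recovery for secp256k1: square root via the (P+1)//4 exponent,
# then a parity adjustment; returns None when x^3 + 7 is not a square mod P.
P = 2**256 - 2**32 - 977
assert P % 4 == 3  # the (P+1)//4 exponent only gives a square root for such primes

def recover_y(x, is_odd):
    y2 = (x * x * x + 7) % P
    y = pow(y2, (P + 1) // 4, P)
    if (y * y) % P != y2:
        return None              # no curve point has this x-coordinate
    if (y % 2 == 1) != is_odd:
        y = (P - y) % P          # take the other root to fix the parity
    return y

GX = 0x79be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798
GY = 0x483ada7726a3c4655da4fbfc0e1108a8fd17b448a68554199c47d08ffb10d4b8
assert recover_y(GX, GY % 2 == 1) == GY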
Spec.K256.PointOps.fst | Spec.K256.PointOps.is_proj_point_at_inf | val is_proj_point_at_inf (p: proj_point) : bool | val is_proj_point_at_inf (p: proj_point) : bool | let is_proj_point_at_inf (p:proj_point) : bool =
let (_, _, z) = p in z = zero | {
"file_name": "specs/Spec.K256.PointOps.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 31,
"end_line": 71,
"start_col": 0,
"start_line": 70
} | module Spec.K256.PointOps
open FStar.Mul
open Lib.IntTypes
open Lib.Sequence
module M = Lib.NatMod
module BSeq = Lib.ByteSequence
#set-options "--z3rlimit 30 --fuel 0 --ifuel 0"
/// Finite field
let prime : (p:pos{p = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F}) =
assert_norm (24 < pow2 256 - 0x1000003D1);
assert_norm (pow2 256 - 0x1000003D1 = pow2 256 - pow2 32 - pow2 9 - pow2 8 - pow2 7 - pow2 6 - pow2 4 - 1);
assert_norm (pow2 256 - 0x1000003D1 = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F);
pow2 256 - 0x1000003D1
let felem = x:nat{x < prime}
let zero : felem = 0
let one : felem = 1
let fadd (x y:felem) : felem = (x + y) % prime
let fsub (x y:felem) : felem = (x - y) % prime
let fmul (x y:felem) : felem = (x * y) % prime
let finv (x:felem) : felem = M.pow_mod #prime x (prime - 2)
let fsqrt (x:felem) : felem = M.pow_mod #prime x ((prime + 1) / 4)
let is_fodd (x:nat) : bool = x % 2 = 1
let ( +% ) = fadd
let ( -% ) = fsub
let ( *% ) = fmul
let ( /% ) (x y:felem) = x *% finv y
/// Scalar field
// Group order
let q : q:pos{q < pow2 256} =
assert_norm (0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141 < pow2 256);
0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141
let qelem = x:nat{x < q}
let qadd (x y:qelem) : qelem = (x + y) % q
let qmul (x y:qelem) : qelem = (x * y) % q
let qinv (x:qelem) : qelem = M.pow_mod #q x (q - 2)
let qnegate (x:qelem) : qelem = (- x) % q
let scalar_is_high (x:qelem) : bool = x > q / 2
let ( +^ ) = qadd
let ( *^ ) = qmul
/// Elliptic curve
let aff_point = felem & felem // Affine point
let proj_point = felem & felem & felem // Projective coordinates
// y * y = x * x * x + b
let b : felem = 7
let is_on_curve (p:aff_point) =
let x, y = p in y *% y = x *% x *% x +% b
let aff_point_at_inf : aff_point = (zero, zero) // not on the curve!
let point_at_inf : proj_point = (zero, one, zero)
let is_aff_point_at_inf (p:aff_point) : bool =
let (x, y) = p in x = zero && y = zero | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.RawIntTypes.fsti.checked",
"Lib.NatMod.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.K256.PointOps.fst"
} | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": true,
"full_module": "Lib.NatMod",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Lib.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | p: Spec.K256.PointOps.proj_point -> Prims.bool | Prims.Tot | [
"total"
] | [] | [
"Spec.K256.PointOps.proj_point",
"Spec.K256.PointOps.felem",
"Prims.op_Equality",
"Spec.K256.PointOps.zero",
"Prims.bool"
] | [] | false | false | false | true | false | let is_proj_point_at_inf (p: proj_point) : bool =
| let _, _, z = p in
z = zero | false |
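is_proj_point_at_inf above only tests Z = 0, with the point at infinity encoded as (zero, one, zero). One consequence worth noting: since finv is defined by the Fermat exponent, finv 0 = 0, so to_aff_point sends (0, 1, 0) to the affine encoding (0, 0) even with its explicit infinity check commented out. A tiny illustrative Python confirmation of that arithmetic (constant names are assumptions of this sketch):

# With the Fermat-exponent inverse, 0 inverts to 0, so the projective point at
# infinity (0, 1, 0) normalises to the affine encoding (0, 0).
P = 2**256 - 2**32 - 977
zinv = pow(0, P - 2, P)  # finv 0 = 0
assert (0 * zinv % P, 1 * zinv % P) == (0, 0)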